!22 add gpu transform module

Merge pull request !22 from tyBrave/master
openharmony_ci 2023-02-23 02:59:33 +00:00 committed by Gitee
commit ef0de4fcdb
GPG Key ID: 173E9B9CA92EEF8F
69 changed files with 4467 additions and 198 deletions


@ -433,6 +433,9 @@ request.skipMemoryCache(true)
| request.sketchFilter() | SketchFilterTransformation | Sketch filter |
| request.mask() | MaskTransformation | Mask |
| request.swirlFilter() | SwirlFilterTransformation | Swirl filter |
| request.kuwaharaFilter() | KuwaharaFilterTransform | Kuwahara filter |
| request.toonFilter() | ToonFilterTransform | Toon (cartoon) filter |
| request.vignetteFilter() | VignetteFilterTransform | Vignette filter |
<img src="screenshot/gif4.gif" width="50%"/>
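A minimal usage sketch for the three newly added GPU transformations, following the `TransformPixelMapPage` sample in this change; the image resource and parameter values are only illustrative, the import path mirrors the other `@ohos/imageknife` imports in the sample, and `ImageKnife` is taken from `globalThis` exactly as the sample page does:
```
import { RequestOption } from '@ohos/imageknife'

let option = new RequestOption();
option.load($r('app.media.jpgSample'))          // illustrative image resource
  .addListener((err, data) => {
    // the filtered result is exposed as data.drawPixelMap.imagePixelMap
    return false;
  })
  .setImageViewSize({ width: vp2px(200), height: vp2px(200) })
  .skipMemoryCache(true)
  .enableGPU()                                  // run the transformation in the gpu_transform module
  .kuwaharaFilter(20.0)                         // or .toonFilter(0.2, 50.0),
                                                // or .vignetteFilter([0.5, 0.5], [0.0, 0.0, 0.0], [0.3, 0.5])
globalThis.ImageKnife.call(option);
```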


@ -7,7 +7,7 @@
"name": "default",
"signingConfig": "default"
}
],
]
},
"modules": [
{
@ -25,6 +25,10 @@
{
"name": "imageknife",
"srcPath": "./imageknife"
},
{
"name": "gpu_transform",
"srcPath": "./gpu_transform"
}
]
}


@ -22,8 +22,9 @@ import { CropCallback } from '@ohos/imageknife'
import { FileUtils } from '@ohos/imageknife'
@Component
@Entry
@Component
export struct CropImagePage2 {
@State options1: PixelMapCrop.Options = new PixelMapCrop.Options();
@State cropTap: boolean = false;


@ -27,11 +27,11 @@ import { BrightnessFilterTransformation } from '@ohos/imageknife'
import { ContrastFilterTransformation } from '@ohos/imageknife'
import { InvertFilterTransformation } from '@ohos/imageknife'
import { SepiaFilterTransformation } from '@ohos/imageknife'
import {SketchFilterTransformation} from '@ohos/imageknife'
import {BlurTransformation} from '@ohos/imageknife'
import {PixelationFilterTransformation} from '@ohos/imageknife'
import {MaskTransformation} from '@ohos/imageknife'
import {SwirlFilterTransformation} from '@ohos/imageknife'
import { SketchFilterTransformation } from '@ohos/imageknife'
import { BlurTransformation } from '@ohos/imageknife'
import { PixelationFilterTransformation } from '@ohos/imageknife'
import { MaskTransformation } from '@ohos/imageknife'
import { SwirlFilterTransformation } from '@ohos/imageknife'
/**
@ -44,26 +44,29 @@ let mUrl = $r('app.media.pngSample');
@Entry
@Component
struct TransformPixelMapPage {
@State url: string= "";
@State mCropPixelMap: PixelMap= undefined;
@State mRoundPixelMap: PixelMap= undefined;
@State mCirclePixelMap: PixelMap= undefined;
@State mCircleBorderPixelMap: PixelMap= undefined;
@State mRotatePixelMap: PixelMap= undefined;
@State mSquarePixelMap: PixelMap= undefined;
@State mClipTopPixelMap: PixelMap= undefined;
@State mClipCenterPixelMap: PixelMap= undefined;
@State mClipBottomPixelMap: PixelMap= undefined;
@State mGrayscalePixelMap: PixelMap= undefined;
@State mBrightnessPixelMap: PixelMap= undefined;
@State mContrastPixelMap: PixelMap= undefined;
@State mInvertPixelMap: PixelMap= undefined;
@State mSepiaPixelMap: PixelMap= undefined;
@State mSketchPixelMap: PixelMap= undefined;
@State mBlurPixelMap: PixelMap= undefined;
@State mPixelPixelMap: PixelMap= undefined;
@State mSwirlPixelMap: PixelMap= undefined;
@State mMaskPixelMap: PixelMap= undefined;
@State url: string = "";
@State mCropPixelMap: PixelMap = undefined;
@State mRoundPixelMap: PixelMap = undefined;
@State mCirclePixelMap: PixelMap = undefined;
@State mCircleBorderPixelMap: PixelMap = undefined;
@State mRotatePixelMap: PixelMap = undefined;
@State mSquarePixelMap: PixelMap = undefined;
@State mClipTopPixelMap: PixelMap = undefined;
@State mClipCenterPixelMap: PixelMap = undefined;
@State mClipBottomPixelMap: PixelMap = undefined;
@State mGrayscalePixelMap: PixelMap = undefined;
@State mBrightnessPixelMap: PixelMap = undefined;
@State mContrastPixelMap: PixelMap = undefined;
@State mInvertPixelMap: PixelMap = undefined;
@State mSepiaPixelMap: PixelMap = undefined;
@State mSketchPixelMap: PixelMap = undefined;
@State mBlurPixelMap: PixelMap = undefined;
@State mPixelPixelMap: PixelMap = undefined;
@State mSwirlPixelMap: PixelMap = undefined;
@State mMaskPixelMap: PixelMap = undefined;
@State mKuwaharaPixelMap: PixelMap = undefined;
@State mToonPixelMap: PixelMap = undefined;
@State mVignettePixelMap: PixelMap = undefined;
build() {
Flex({ direction: FlexDirection.Column, alignItems: ItemAlign.Center }) {
@ -98,7 +101,7 @@ struct TransformPixelMapPage {
});
}.margin({ top: 10 })
Image(this.mCropPixelMap )
Image(this.mCropPixelMap)
.objectFit(ImageFit.None)
.width(100)
.height(100)
@ -151,7 +154,7 @@ struct TransformPixelMapPage {
});
}.margin({ top: 10 })
Image(this.mRoundPixelMap )
Image(this.mRoundPixelMap)
.objectFit(ImageFit.Fill)
.width(100)
.height(100)
@ -170,7 +173,7 @@ struct TransformPixelMapPage {
.onClick(() => {
this.circleTransformation();
});
Image(this.mCirclePixelMap )
Image(this.mCirclePixelMap)
.width(200)
.height(200)
.margin({ top: 10 })
@ -188,7 +191,7 @@ struct TransformPixelMapPage {
.onClick(() => {
this.circleBorderTransformation(5);
});
Image(this.mCircleBorderPixelMap )
Image(this.mCircleBorderPixelMap)
.width(200)
.height(200)
.margin({ top: 10 })
@ -210,7 +213,7 @@ struct TransformPixelMapPage {
}
this.transformRotate(mRotate);
});
Image(this.mRotatePixelMap )
Image(this.mRotatePixelMap)
.width(200)
.height(200)
.margin({ top: 10 })
@ -228,7 +231,7 @@ struct TransformPixelMapPage {
.onClick(() => {
this.transformSquare();
});
Image(this.mSquarePixelMap )
Image(this.mSquarePixelMap)
.objectFit(ImageFit.Fill)
.width(200)
.height(200)
@ -266,17 +269,17 @@ struct TransformPixelMapPage {
}.margin({ top: 10 })
Row({ space: 1 }) {
Image(this.mClipTopPixelMap )
Image(this.mClipTopPixelMap)
.objectFit(ImageFit.Fill)
.width(100)
.height(100)
.margin({ top: 10 })
Image(this.mClipCenterPixelMap )
Image(this.mClipCenterPixelMap)
.objectFit(ImageFit.Fill)
.width(100)
.height(100)
.margin({ top: 10 })
Image(this.mClipBottomPixelMap )
Image(this.mClipBottomPixelMap)
.objectFit(ImageFit.Fill)
.width(100)
.height(100)
@ -295,7 +298,7 @@ struct TransformPixelMapPage {
.onClick(() => {
this.grayscalePixelMap();
});
Image(this.mGrayscalePixelMap )
Image(this.mGrayscalePixelMap)
.objectFit(ImageFit.Fill)
.width(200)
.height(200)
@ -313,7 +316,7 @@ struct TransformPixelMapPage {
.onClick(() => {
this.brightnessPixelMap(0.8);
});
Image(this.mBrightnessPixelMap )
Image(this.mBrightnessPixelMap)
.objectFit(ImageFit.Fill)
.width(200)
.height(200)
@ -331,7 +334,7 @@ struct TransformPixelMapPage {
.onClick(() => {
this.contrastPixelMap(4);
});
Image(this.mContrastPixelMap )
Image(this.mContrastPixelMap)
.objectFit(ImageFit.Fill)
.width(200)
.height(200)
@ -349,7 +352,7 @@ struct TransformPixelMapPage {
.onClick(() => {
this.invertPixelMap();
});
Image(this.mInvertPixelMap )
Image(this.mInvertPixelMap)
.objectFit(ImageFit.Fill)
.width(200)
.height(200)
@ -368,7 +371,7 @@ struct TransformPixelMapPage {
.onClick(() => {
this.sepiaPixelMap();
});
Image(this.mSepiaPixelMap )
Image(this.mSepiaPixelMap)
.objectFit(ImageFit.Fill)
.width(200)
.height(200)
@ -386,7 +389,7 @@ struct TransformPixelMapPage {
.onClick(() => {
this.sketchPixelMap();
});
Image(this.mSketchPixelMap )
Image(this.mSketchPixelMap)
.objectFit(ImageFit.Fill)
.width(200)
.height(200)
@ -405,7 +408,7 @@ struct TransformPixelMapPage {
.onClick(() => {
this.blurHandlePixelMap(20);
});
Image(this.mBlurPixelMap )
Image(this.mBlurPixelMap)
.objectFit(ImageFit.Fill)
.width(200)
.height(200)
@ -425,7 +428,7 @@ struct TransformPixelMapPage {
.onClick(() => {
this.pixelHandlePixelMap(20);
});
Image(this.mPixelPixelMap )
Image(this.mPixelPixelMap)
.objectFit(ImageFit.Fill)
.width(200)
.height(200)
@ -444,7 +447,7 @@ struct TransformPixelMapPage {
.onClick(() => {
this.swirlHandlePixelMap();
});
Image(this.mSwirlPixelMap )
Image(this.mSwirlPixelMap)
.objectFit(ImageFit.Fill)
.width(200)
.height(200)
@ -463,7 +466,64 @@ struct TransformPixelMapPage {
.onClick(() => {
this.maskHandlePixelMap($r('app.media.mask_starfish'));
});
Image(this.mMaskPixelMap )
Image(this.mMaskPixelMap)
.objectFit(ImageFit.Fill)
.width(200)
.height(200)
.margin({ top: 10 })
}.margin({ top: 10 });
Column() {
Text("KuwaharaFilterTransform").fontColor(Color.Gray).fontSize(16);
Button() {
Text("图片kuwahara").fontSize(13).fontColor(Color.White)
}
.height(35)
.width(120)
.margin({ top: 10 })
.onClick(() => {
this.kuwaharaHandlePixelMap();
});
Image(this.mKuwaharaPixelMap)
.objectFit(ImageFit.Fill)
.width(200)
.height(200)
.margin({ top: 10 })
}.margin({ top: 10 });
Column() {
Text("ToonFilterTransform").fontColor(Color.Gray).fontSize(16);
Button() {
Text("图片toon").fontSize(13).fontColor(Color.White)
}
.height(35)
.width(120)
.margin({ top: 10 })
.onClick(() => {
this.toonHandlePixelMap();
});
Image(this.mToonPixelMap)
.objectFit(ImageFit.Fill)
.width(200)
.height(200)
.margin({ top: 10 })
}.margin({ top: 10 });
Column() {
Text("VignetteFilterTransform").fontColor(Color.Gray).fontSize(16);
Button() {
Text("图片vignette").fontSize(13).fontColor(Color.White)
}
.height(35)
.width(120)
.margin({ top: 10 })
.onClick(() => {
this.vignetteHandlePixelMap();
});
Image(this.mVignettePixelMap)
.objectFit(ImageFit.Fill)
.width(200)
.height(200)
@ -481,19 +541,19 @@ struct TransformPixelMapPage {
}
/**
/**
* centerCrop
*/
centerCrop() {
var imageKnifeOption = new RequestOption();
imageKnifeOption.load($r('app.media.jpgSample'))
// imageKnifeOption.load(mUrl)
// imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
this.mCropPixelMap = result;
setTimeout(() => {
let result2 = undefined;
result2 = data.drawPixelMap.imagePixelMap as PixelMap;
result2 = data.drawPixelMap.imagePixelMap as PixelMap;
this.mCropPixelMap = result2;
}, 100)
return false;
@ -504,7 +564,7 @@ struct TransformPixelMapPage {
ImageKnife.call(imageKnifeOption);
}
/**
/**
* centerInside
*/
centerInside() {
@ -515,7 +575,7 @@ struct TransformPixelMapPage {
this.mCropPixelMap = result;
setTimeout(() => {
let result2 = undefined;
result2 = data.drawPixelMap.imagePixelMap as PixelMap;
result2 = data.drawPixelMap.imagePixelMap as PixelMap;
this.mCropPixelMap = result2;
}, 100)
return false;
@ -526,7 +586,7 @@ struct TransformPixelMapPage {
ImageKnife.call(imageKnifeOption);
}
/**
/**
* centerInside
*/
fitCenter() {
@ -537,7 +597,7 @@ struct TransformPixelMapPage {
this.mCropPixelMap = result;
setTimeout(() => {
let result2 = undefined;
result2 = data.drawPixelMap.imagePixelMap as PixelMap;
result2 = data.drawPixelMap.imagePixelMap as PixelMap;
this.mCropPixelMap = result2;
}, 100)
return false;
@ -547,11 +607,11 @@ struct TransformPixelMapPage {
.fitCenter();
ImageKnife.call(imageKnifeOption);
}
/**
/**
* 圆角设置
*/
roundedCornersTransformation(top_left: number,
bottom_left: number, top_right: number, bottom_right: number) {
bottom_left: number, top_right: number, bottom_right: number) {
var imageKnifeOption = new RequestOption();
imageKnifeOption.load(mUrl)
@ -560,18 +620,23 @@ struct TransformPixelMapPage {
this.mRoundPixelMap = result;
setTimeout(() => {
let result2 = undefined;
result2 = data.drawPixelMap.imagePixelMap as PixelMap;
result2 = data.drawPixelMap.imagePixelMap as PixelMap;
this.mRoundPixelMap = result2;
}, 100)
return false;
})
.setImageViewSize({ width: vp2px(100), height: vp2px(100) })
.skipMemoryCache(true)
.roundedCorners({ top_left: top_left, top_right: top_right, bottom_left: bottom_left, bottom_right: bottom_right })
.roundedCorners({
top_left: top_left,
top_right: top_right,
bottom_left: bottom_left,
bottom_right: bottom_right
})
ImageKnife.call(imageKnifeOption);
}
/**
/**
* 裁剪圆
*/
circleTransformation() {
@ -579,7 +644,7 @@ struct TransformPixelMapPage {
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mCirclePixelMap = result;
return false;
})
@ -589,7 +654,7 @@ struct TransformPixelMapPage {
ImageKnife.call(imageKnifeOption);
}
/**
/**
* 圆环裁剪
*/
circleBorderTransformation(border: number) {
@ -599,7 +664,7 @@ struct TransformPixelMapPage {
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mCircleBorderPixelMap = result;
return false;
})
@ -610,7 +675,7 @@ struct TransformPixelMapPage {
ImageKnife.call(imageKnifeOption);
}
/**
/**
* 旋转
*/
transformRotate(angled: number) {
@ -619,7 +684,7 @@ struct TransformPixelMapPage {
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mRotatePixelMap = result;
return false;
})
@ -629,7 +694,7 @@ struct TransformPixelMapPage {
ImageKnife.call(imageKnifeOption);
}
/**
/**
* 正方形裁剪
*/
transformSquare() {
@ -638,7 +703,7 @@ struct TransformPixelMapPage {
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mSquarePixelMap = result;
return false;
})
@ -648,7 +713,7 @@ struct TransformPixelMapPage {
ImageKnife.call(imageKnifeOption);
}
/**
/**
* 区域裁剪
*/
clipPixelMap(width: number, height: number, cropType: CropType) {
@ -658,13 +723,13 @@ struct TransformPixelMapPage {
.addListener((err, data) => {
let result = undefined;
if (cropType == CropType.TOP) {
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mClipTopPixelMap = result;
} else if (cropType == CropType.CENTER) {
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mClipCenterPixelMap = result;
} else if (cropType == CropType.BOTTOM) {
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mClipBottomPixelMap = result;
}
return false;
@ -676,7 +741,7 @@ struct TransformPixelMapPage {
}
/**
/**
* 灰度
*/
grayscalePixelMap() {
@ -685,18 +750,19 @@ struct TransformPixelMapPage {
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mGrayscalePixelMap = result;
return false;
})
.setImageViewSize({ width: vp2px(200), height: vp2px(200) })
.skipMemoryCache(true)
.enableGPU()
.grayscale()
ImageKnife.call(imageKnifeOption);
}
/**
/**
*亮度b
*/
brightnessPixelMap(brightness: number) {
@ -705,18 +771,19 @@ struct TransformPixelMapPage {
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mBrightnessPixelMap = result;
return false;
})
.setImageViewSize({ width: vp2px(200), height: vp2px(200) })
.skipMemoryCache(true)
.enableGPU()
.brightnessFilter(brightness)
ImageKnife.call(imageKnifeOption);
}
/**
/**
*对比度
*/
contrastPixelMap(contrast: number) {
@ -725,18 +792,19 @@ struct TransformPixelMapPage {
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mContrastPixelMap = result;
return false;
})
.setImageViewSize({ width: vp2px(200), height: vp2px(200) })
.skipMemoryCache(true)
.enableGPU()
.contrastFilter(contrast)
ImageKnife.call(imageKnifeOption);
}
/**
/**
*反转处理
*/
invertPixelMap() {
@ -745,18 +813,19 @@ struct TransformPixelMapPage {
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mInvertPixelMap = result;
return false;
})
.setImageViewSize({ width: vp2px(200), height: vp2px(200) })
.skipMemoryCache(true)
.enableGPU()
.invertFilter()
ImageKnife.call(imageKnifeOption);
}
/**
/**
*照片老旧出来(黑褐色)
*/
sepiaPixelMap() {
@ -765,18 +834,19 @@ struct TransformPixelMapPage {
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mSepiaPixelMap = result;
return false;
})
.setImageViewSize({ width: vp2px(200), height: vp2px(200) })
.skipMemoryCache(true)
.enableGPU()
.sepiaFilter()
ImageKnife.call(imageKnifeOption);
}
/**
/**
*素描
*/
sketchPixelMap() {
@ -785,18 +855,19 @@ struct TransformPixelMapPage {
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mSketchPixelMap = result;
return false;
})
.setImageViewSize({ width: vp2px(200), height: vp2px(200) })
.skipMemoryCache(true)
.enableGPU()
.sketchFilter()
ImageKnife.call(imageKnifeOption);
}
/**
/**
*模糊
*/
blurHandlePixelMap(radius: number) {
@ -805,17 +876,18 @@ struct TransformPixelMapPage {
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mBlurPixelMap = result;
return false;
})
.setImageViewSize({ width: vp2px(200), height: vp2px(200) })
.skipMemoryCache(true)
.enableGPU()
.blur(radius)
ImageKnife.call(imageKnifeOption);
}
/**
/**
*马赛克
*/
pixelHandlePixelMap(pixel: number) {
@ -824,18 +896,19 @@ struct TransformPixelMapPage {
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mPixelPixelMap = result;
return false;
})
.setImageViewSize({ width: vp2px(200), height: vp2px(200) })
.skipMemoryCache(true)
.enableGPU()
.pixelationFilter(pixel)
ImageKnife.call(imageKnifeOption);
}
/**
/**
*扭曲
*/
swirlHandlePixelMap() {
@ -844,18 +917,19 @@ struct TransformPixelMapPage {
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mSwirlPixelMap = result;
return false;
})
.setImageViewSize({ width: vp2px(200), height: vp2px(200) })
.skipMemoryCache(true)
.enableGPU()
.swirlFilter(80)
// .diskCacheStrategy(new NONE())
ImageKnife.call(imageKnifeOption);
}
/**
/**
*遮罩
*/
maskHandlePixelMap(maskResource: Resource) {
@ -865,7 +939,7 @@ struct TransformPixelMapPage {
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mMaskPixelMap = result;
return false;
})
@ -876,6 +950,69 @@ struct TransformPixelMapPage {
ImageKnife.call(imageKnifeOption);
}
/**
*kuwahara
*/
kuwaharaHandlePixelMap() {
let imageKnifeOption = new RequestOption();
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mKuwaharaPixelMap = result;
return false;
})
.setImageViewSize({ width: vp2px(200), height: vp2px(200) })
.skipMemoryCache(true)
.enableGPU()
.kuwaharaFilter(20.0)
// .diskCacheStrategy(new NONE())
ImageKnife.call(imageKnifeOption);
}
/**
*toon
*/
toonHandlePixelMap() {
let imageKnifeOption = new RequestOption();
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mToonPixelMap = result;
return false;
})
.setImageViewSize({ width: vp2px(200), height: vp2px(200) })
.skipMemoryCache(true)
.enableGPU()
.toonFilter(0.2, 50.0);
// .diskCacheStrategy(new NONE())
ImageKnife.call(imageKnifeOption);
}
/**
*vignette
*/
vignetteHandlePixelMap() {
let imageKnifeOption = new RequestOption();
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mVignettePixelMap = result;
return false;
})
.setImageViewSize({ width: vp2px(200), height: vp2px(200) })
.skipMemoryCache(true)
.enableGPU()
.vignetteFilter([0.5, 0.5], [0.0, 0.0, 0.0], [0.3, 0.5])
// .diskCacheStrategy(new NONE())
ImageKnife.call(imageKnifeOption);
}
}
var ImageKnife = globalThis.ImageKnife

gpu_transform/.gitignore

@ -0,0 +1,4 @@
/node_modules
/.preview
/build
/.cxx


@ -0,0 +1,16 @@
## 1.0.0
Reads the image's pixel buffer and uses OpenGL shaders on the GPU to apply image filter effects.
- Supports the blur filter
- Supports the brightness filter
- Supports the color-invert filter
- Supports the contrast filter
- Supports the grayscale filter
- Supports the Kuwahara filter
- Supports the pixelation (mosaic) filter
- Supports the sepia filter
- Supports the sketch filter
- Supports the swirl filter
- Supports the toon filter
- Supports the vignette filter

gpu_transform/LICENSE

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright {yyyy} {name of copyright owner}
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.


@ -0,0 +1,11 @@
[
{
"Name": "android-gpuimage",
"License": " Apache License, Version 2.0",
"License File": "https://github.com/cats-oss/android-gpuimage",
"Version Number": "4.13.1",
"Owner" : "cats-oss",
"Upstream URL": "https://github.com/cats-oss/android-gpuimage",
"Description": "Android filters based on OpenGL (idea from GPUImage for iOS)"
}
]

gpu_transform/README.md

@ -0,0 +1,79 @@
## gpu_transform
This module reads an image's pixel buffer and drives the GPU with OpenGL shaders to apply image filter effects.
It is an OpenHarmony re-implementation based on the open-source library [android-gpuimage](https://github.com/cats-oss/android-gpuimage).
## Download and installation
```
npm install @ohos/gpu_transform --save
```
## Usage example
```
// Read the pixel data into a buffer
let bufferData = new ArrayBuffer(bitmap.getPixelBytesNumber());
await bitmap.readPixelsToBuffer(bufferData);
// Use the GPUImageVignetterFilter
let filter = new GPUImageVignetterFilter();
filter.setImageData(bufferData, targetWidth, targetHeight);
filter.setVignetteCenter(this.centerPoint);
filter.setVignetteColor(this.vignetteColor);
filter.setVignetteStart(this.vignetteSpace[0]);
filter.setVignetteEnd(this.vignetteSpace[1]);
// Get the GPU-processed pixel data
let buf = await filter.getPixelMapBuf(0, 0, targetWidth, targetHeight)
// Write the pixel data back
bitmap.writeBufferToPixels(buf);
```
## Filter types
| Filter type | Description |
| ---------------------------- | ---------------------------|
| GPUImageBlurFilter | Blur filter |
| GPUImageBrightnessFilter | Brightness filter |
| GPUImageColorInvertFilter | Color-invert filter |
| GPUImageContrastFilter | Contrast filter |
| GPUImageGrayscaleFilter | Grayscale filter |
| GPUImageKuwaharaFilter | Kuwahara filter |
| GPUImagePixelationFilter | Pixelation (mosaic) filter |
| GPUImageSepiaToneFilter | Sepia filter |
| GPUImageSketchFilter | Sketch filter |
| GPUImageSwirlFilter | Swirl filter |
| GPUImageToonFilter | Toon filter |
| GPUImageVignetterFilter | Vignette filter |
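All of the filters above follow the same buffer-in/buffer-out pattern as the vignette example in the usage section. As a second, minimal sketch (assuming `bitmap`, `targetWidth` and `targetHeight` are prepared exactly as above), the parameter-free GPUImageGrayscaleFilter looks like this:
```
import { GPUImageGrayscaleFilter } from '@ohos/gpu_transform'

// Read the source pixels into a buffer
let bufferData = new ArrayBuffer(bitmap.getPixelBytesNumber());
await bitmap.readPixelsToBuffer(bufferData);
// Run the grayscale shader on the GPU
let filter = new GPUImageGrayscaleFilter();
filter.setImageData(bufferData, targetWidth, targetHeight);
let buf = await filter.getPixelMapBuf(0, 0, targetWidth, targetHeight);
// Write the processed pixels back into the PixelMap
bitmap.writeBufferToPixels(buf);
```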
## Directory structure
```
/gpu_transform/src/main
--cpp
--common # shared NAPI helper macros
--constant # vertex and fragment shaders
--napi # native entry point
--render # rendering
--util # utilities
--ets
--filter # the individual filters
--gl # JS-side wrapper of the native layer
--interface # interfaces
```
## Compatibility
Supports OpenHarmony API version 9 and above.
## Contributing
If you run into any problem while using the module, feel free to file an [issue](https://gitee.com/openharmony-tpc/ImageKnife/issues); we also warmly welcome a [PR](https://gitee.com/openharmony-tpc/ImageKnife/issues).
## License
This project is licensed under the [Apache License 2.0](https://gitee.com/openharmony-tpc/ImageKnife/blob/master/LICENSE); feel free to enjoy and take part in open source.


@ -0,0 +1,19 @@
{
"apiType": "stageMode",
"buildOption": {
"externalNativeOptions": {
"path": "./src/main/cpp/CMakeLists.txt",
"arguments": "",
"abiFilters": [
"armeabi-v7a",
"arm64-v8a"
],
"cppFlags": ""
},
},
"targets": [
{
"name": "default"
}
]
}


@ -0,0 +1,2 @@
// Script for compiling build behavior. It is built in the build plug-in and cannot be modified currently.
export { harTasks } from '@ohos/hvigor-ohos-plugin';

gpu_transform/index.ets

@ -0,0 +1,33 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export { GPUImage3x3TextureSamplingFilter } from './src/main/ets/gpu/filter/GPUImage3x3TextureSamplingFilter'
export { GPUImageBlurFilter } from './src/main/ets/gpu/filter/GPUImageBlurFilter'
export { GPUImageBrightnessFilter } from './src/main/ets/gpu/filter/GPUImageBrightnessFilter'
export { GPUImageColorInvertFilter } from './src/main/ets/gpu/filter/GPUImageColorInvertFilter'
export { GPUImageColorMatrixFilter } from './src/main/ets/gpu/filter/GPUImageColorMatrixFilter'
export { GPUImageContrastFilter } from './src/main/ets/gpu/filter/GPUImageContrastFilter'
export { GPUImageFilter } from './src/main/ets/gpu/filter/GPUImageFilter'
export { GPUImageFilterGroup } from './src/main/ets/gpu/filter/GPUImageFilterGroup'
export { GPUImageGrayscaleFilter } from './src/main/ets/gpu/filter/GPUImageGrayscaleFilter'
export { GPUImageKuwaharaFilter } from './src/main/ets/gpu/filter/GPUImageKuwaharaFilter'
export { GPUImagePixelationFilter } from './src/main/ets/gpu/filter/GPUImagePixelationFilter'
export { GPUImageSepiaToneFilter } from './src/main/ets/gpu/filter/GPUImageSepiaToneFilter'
export { GPUImageSketchFilter } from './src/main/ets/gpu/filter/GPUImageSketchFilter'
export { GPUImageSwirlFilter } from './src/main/ets/gpu/filter/GPUImageSwirlFilter'
export { GPUImageToonFilter } from './src/main/ets/gpu/filter/GPUImageToonFilter'
export { GPUImageVignetterFilter } from './src/main/ets/gpu/filter/GPUImageVignetterFilter'


@ -0,0 +1,24 @@
{
"license":"Apache License 2.0",
"types":"",
"devDependencies":{},
"keywords":[
"OpenHarmony",
"transformation",
"gpu_transform"
],
"name":"@ohos/gpu_transform",
"description":"based on OpenHarmony system, it can quickly realize image blur, Mosaic, sketch and other transformation effects through GPU",
"author":"ohos_tpc",
"ohos":{
"org":"opensource"
},
"tags":[
"Tool"
],
"main":"index.ets",
"repository":"https://gitee.com/openharmony-tpc/ImageKnife",
"type":"module",
"version":"1.0.0",
"dependencies":{}
}


@ -0,0 +1,33 @@
# the minimum version of CMake.
cmake_minimum_required(VERSION 3.4.1)
project(gpu_transform)
set(NATIVERENDER_ROOT_PATH ${CMAKE_CURRENT_SOURCE_DIR})
include_directories(${NATIVERENDER_ROOT_PATH}
${NATIVERENDER_ROOT_PATH}/include
${NATIVERENDER_ROOT_PATH}/util
${NATIVERENDER_ROOT_PATH}/napi
${NATIVERENDER_ROOT_PATH}/common
${NATIVERENDER_ROOT_PATH}/render
${NATIVERENDER_ROOT_PATH}/constant
)
add_library(nativeGpu SHARED
${NATIVERENDER_ROOT_PATH}/napi/napi_init.cpp
${NATIVERENDER_ROOT_PATH}/render/EGLRender.cpp
${NATIVERENDER_ROOT_PATH}/util/GLUtils.cpp
${NATIVERENDER_ROOT_PATH}/util/NapiUtil.cpp
)
find_library (
hilog-lib
hilog_ndk.z )
find_library (
EGL-lib
EGL )
find_library (
GLES-lib
GLESv3 )
target_link_libraries(nativeGpu PUBLIC ${hilog-lib} ${EGL-lib} ${GLES-lib} libace_napi.z.so libc++.a)


@ -0,0 +1,56 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
// Created on 2022/12/15.
//
// Node APIs are not fully supported. To solve the compilation error of the interface cannot be found,
#ifndef GPU_ImageETS_native_common_H
#define GPU_ImageETS_native_common_H
#define NAPI_RETVAL_NOTHING
#define GET_AND_THROW_LAST_ERROR(env) \
do { \
const napi_extended_error_info* errorInfo = nullptr; \
napi_get_last_error_info((env), &errorInfo); \
bool isPending = false; \
napi_is_exception_pending((env), &isPending); \
if (!isPending && errorInfo != nullptr) { \
const char* errorMessage = \
errorInfo->error_message != nullptr ? errorInfo->error_message : "empty error message"; \
napi_throw_error((env), nullptr, errorMessage); \
} \
} while (0)
#define DECLARE_NAPI_FUNCTION(name, func) \
{ \
(name), nullptr, (func), nullptr, nullptr, nullptr, napi_default, nullptr \
}
#define NAPI_CALL_BASE(env, theCall, retVal) \
do { \
if ((theCall) != napi_ok) { \
GET_AND_THROW_LAST_ERROR((env)); \
return retVal; \
} \
} while (0)
#define NAPI_CALL(env, theCall) NAPI_CALL_BASE(env, theCall, nullptr)
#endif // GPU_ImageETS_native_common_H


@ -0,0 +1,475 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
// Created on 2022/12/16.
//
// Node APIs are not fully supported. To solve the compilation error of the interface cannot be found,
#ifndef GPU_ImageETS_constant_shape_H
#define GPU_ImageETS_constant_shape_H
const int UNIFORM_TYPE_2FV_SIZE = 10;
const int UNIFORM_TYPE_FV = 1;
const int UNIFORM_TYPE_2FV = 2;
const int UNIFORM_TYPE_3FV = 3;
const int UNIFORM_TYPE_4FV = 4;
const int UNIFORM_TYPE_2F = 21;
const int DEFAULT_ZERO = 0;
const int DEFAULT_ONE = 1;
const int DEFAULT_TWO = 2;
const int DEFAULT_THREE = 3;
const int DEFAULT_FOUR = 4;
const float DEFAULT_ONE_HALF = 1.5;
const int UNIFORM_TYPE_ZERO = 0;
const int UNIFORM_TYPE_ONE = 1;
const int UNIFORM_TYPE_THREE = 3;
const int UNIFORM_TYPE_FOUR = 4;
const int SHADER_TYPE_BRIGHT = 0;
const int SHADER_TYPE_CONTRAST = 1;
const int SHADER_TYPE_INVERT = 2;
const int SHADER_TYPE_PIXELATION = 3;
const int SHADER_TYPE_KUWAHARA = 4;
const int SHADER_TYPE_SEPIA = 5;
const int SHADER_TYPE_SKETCH = 6;
const int SHADER_TYPE_SWIRL = 7;
const int SHADER_TYPE_TOON = 8;
const int SHADER_TYPE_VIGNETTE = 9;
const int SHADER_TYPE_GRAYSCALE = 10;
const int SHADER_TYPE_BLUR = 12;
const char vShaderStr[] =
"#version 300 es \n"
"layout(location = 0) in vec4 a_position; \n"
"layout(location = 1) in vec2 a_texCoord; \n"
"out vec2 v_texCoord; \n"
"void main() \n"
"{ \n"
" gl_Position = a_position; \n"
" v_texCoord = a_texCoord; \n"
"} \n";
const char fShaderStr0[] =
"#version 300 es \n"
"precision mediump float; \n"
"in vec2 v_texCoord; \n"
"layout(location = 0) out vec4 outColor; \n"
"uniform sampler2D s_TextureMap; \n"
"void main() \n"
"{ \n"
"outColor = texture(s_TextureMap,v_texCoord); \n"
"} \n";
const char v3x3ShaderStr[] =
"#version 300 es\n"
"precision highp float;\n"
"layout(location = 0) in vec4 a_position;\n"
"layout(location = 1) in vec2 a_texCoord;\n"
"uniform highp float texelWidth;\n"
"uniform highp float texelHeight;\n"
"out vec2 v_texCoord;\n"
"out vec2 leftTextureCoordinate;\n"
"out vec2 rightTextureCoordinate;\n"
"out vec2 topTextureCoordinate;\n"
"out vec2 topLeftTextureCoordinate;\n"
"out vec2 topRightTextureCoordinate;\n"
"out vec2 bottomTextureCoordinate;\n"
"out vec2 bottomLeftTextureCoordinate;\n"
"out vec2 bottomRightTextureCoordinate;\n"
"void main()\n"
"{\n"
"gl_Position = a_position;\n"
"vec2 widthStep = vec2(texelWidth, 0.0);\n"
"vec2 heightStep = vec2(0.0, texelHeight);\n"
"vec2 widthHeightStep = vec2(texelWidth, texelHeight);\n"
"vec2 widthNegativeHeightStep = vec2(texelWidth, -texelHeight);\n"
"v_texCoord = a_texCoord;\n"
"leftTextureCoordinate = a_texCoord - widthStep;\n"
"rightTextureCoordinate = a_texCoord + widthStep;\n"
"\n"
"topTextureCoordinate = a_texCoord - heightStep;\n"
"topLeftTextureCoordinate = a_texCoord - widthHeightStep;\n"
"topRightTextureCoordinate = a_texCoord + widthNegativeHeightStep;\n"
"\n"
"bottomTextureCoordinate = a_texCoord +heightStep;\n"
"bottomLeftTextureCoordinate = a_texCoord - widthNegativeHeightStep;\n"
"bottomRightTextureCoordinate = a_texCoord + widthHeightStep;\n"
"}";
// kuwahara
const char fShaderStr3[] =
"#version 300 es\n"
"precision highp float;\n"
"layout(location = 0) out vec4 outColor;\n"
"in vec2 v_texCoord;\n"
"uniform lowp sampler2D s_TextureMap;\n"
"uniform vec2 u_texSize;\n"
"uniform highp float radius;\n"
"const vec2 src_size = vec2(1.0 / 768.0, 1.0 / 1024.0);\n"
"void main() {\n"
"vec2 uv = v_texCoord;\n"
"float n = float((radius + 1.0) * (radius + 1.0));\n"
"int i ; int j ;\n"
"vec3 m0 = vec3(0.0); vec3 m1 = vec3(0.0); vec3 m2 = vec3(0.0); vec3 m3 = vec3(0.0);\n"
"vec3 s0 = vec3(0.0); vec3 s1 = vec3(0.0); vec3 s2 = vec3(0.0); vec3 s3 = vec3(0.0);\n"
"vec3 c;\n"
"for (j = -int(radius); j <=0; ++j) {\n"
"for (i = -int(radius); i <=0; ++i) {\n"
"c = texture(s_TextureMap,uv + vec2(i,j) * src_size).rgb;\n"
"m0 += c;\n"
"s0 += c * c;\n"
"}\n"
"}\n"
"for (j = -int(radius); j <=0; ++j) {\n"
"for (i =0; i <=int(radius); ++i) {\n"
"c = texture(s_TextureMap,uv + vec2(i,j) * src_size).rgb;\n"
"m1 += c;\n"
"s1 += c * c;\n"
"}\n"
"}\n"
"for (j = 0; j <=int(radius); ++j) {\n"
"for (i = 0; i <= int(radius); ++i) {\n"
"c = texture(s_TextureMap,uv + vec2(i,j) * src_size).rgb;\n"
"m2 += c;\n"
"s2 += c * c;\n"
"}\n"
"}\n"
"for (j = 0; j <=int(radius); ++j) {\n"
"for (i = -int(radius); i <= 0; ++i) {\n"
"c = texture(s_TextureMap,uv + vec2(i,j) * src_size).rgb;\n"
"m3 += c;\n"
"s3 += c * c;\n"
"}\n"
"}\n"
"\n"
"\n"
"float min_sigma2 = 1e+2;\n"
"m0 /= n;\n"
"s0 = abs(s0 /n - m0 * m0);\n"
"\n"
"float sigma2 = s0.r + s0.g + s0.b;\n"
"if (sigma2 < min_sigma2) {\n"
"min_sigma2 = sigma2;\n"
"outColor = vec4(m0,1.0);\n"
"}\n"
"\n"
"m1 /= n;\n"
"s1 = abs(s1 / n -m1 * m1);\n"
"\n"
"sigma2 = s1.r + s1.g + s1.b;\n"
"if (sigma2 < min_sigma2) {\n"
"min_sigma2 = sigma2;\n"
"outColor = vec4(m1,1.0);\n"
"}\n"
"\n"
"m2 /= n;\n"
"s2 = abs(s2 / n -m2 * m2);\n"
"\n"
"sigma2 = s2.r + s2.g + s2.b;\n"
"if (sigma2 < min_sigma2) {\n"
"min_sigma2 = sigma2;\n"
"outColor = vec4(m2,1.0);\n"
"}\n"
"\n"
"m3 /= n;\n"
"s3 = abs(s3 / n -m3 * m3);\n"
"\n"
"sigma2 = s3.r + s3.g + s3.b;\n"
"if (sigma2 < min_sigma2) {\n"
"min_sigma2 = sigma2;\n"
"outColor = vec4(m3,1.0);\n"
"}\n"
"}\n";
// 旋转
const char swirlFShaderStr[] =
"#version 300 es\n"
"precision highp float;\n"
"layout(location = 0) out vec4 outColor;\n"
"in vec2 v_texCoord;\n"
"uniform lowp sampler2D s_TextureMap;\n"
"uniform vec2 u_texSize;\n"
"uniform highp float radius;\n"
"uniform highp float angle;\n"
"uniform vec2 center;\n"
"void main() {\n"
"vec2 tc = v_texCoord * u_texSize;\n"
"tc -= center;\n"
"float dist = length(tc);\n"
"if (dist < radius) {\n"
"float percent = (radius - dist) / radius;\n"
"float theta = percent * percent * angle * 8.0;\n"
"float s = sin(theta);\n"
"float c = cos(theta);\n"
"tc = vec2(dot(tc, vec2(c, -s)), dot(tc, vec2(s, c)));\n"
"}\n"
"tc += center;\n"
"outColor = texture(s_TextureMap, tc / u_texSize);\n"
"}";
// 亮度
const char brightFShaderStr[] =
"#version 300 es\n"
"precision highp float;\n"
"in vec2 v_texCoord;\n"
"\n"
"uniform lowp sampler2D s_TextureMap;\n"
"uniform lowp float brightness;\n"
"layout(location = 0) out vec4 outColor;\n"
"\n"
"void main()\n"
"{\n"
"lowp vec4 textureColor = texture(s_TextureMap, v_texCoord);\n"
"\n"
"outColor = vec4((textureColor.rgb + vec3(brightness)), textureColor.w);\n"
"}";
// 反转
const char contrastFShaderStr[] =
"#version 300 es\n"
"precision highp float;\n"
"in vec2 v_texCoord;\n"
"\n"
"uniform lowp sampler2D s_TextureMap;\n"
"uniform lowp float contrast;\n"
"layout(location = 0) out vec4 outColor;\n"
"\n"
"void main()\n"
"{\n"
"lowp vec4 textureColor = texture(s_TextureMap, v_texCoord);\n"
"\n"
"outColor = vec4(((textureColor.rgb - vec3(0.5)) * contrast + vec3(0.5)), textureColor.w);\n"
"}";
// invert
const char invertFShaderStr[] =
"#version 300 es\n"
"precision highp float;\n"
"in vec2 v_texCoord;\n"
"\n"
"uniform lowp sampler2D s_TextureMap;\n"
"layout(location = 0) out vec4 outColor;\n"
"\n"
"void main()\n"
"{\n"
"lowp vec4 textureColor = texture(s_TextureMap, v_texCoord);\n"
"\n"
"outColor = vec4((1.0 - textureColor.rgb), textureColor.w);\n"
"}";
// pixel
const char pixelFShaderStr[] =
"#version 300 es\n"
"precision highp float;\n"
"in vec2 v_texCoord;\n"
"uniform float imageWidthFactor;\n"
"uniform float imageHeightFactor;\n"
"uniform lowp sampler2D s_TextureMap;\n"
"uniform float pixel;\n"
"layout(location = 0) out vec4 outColor;\n"
"void main()\n"
"{\n"
"vec2 uv = v_texCoord.xy;\n"
"float dx = pixel * imageWidthFactor;\n"
"float dy = pixel * imageHeightFactor;\n"
"vec2 coord = vec2(dx * floor(uv.x / dx), dy * floor(uv.y / dy));\n"
"vec3 tc = texture(s_TextureMap, coord).xyz;\n"
"outColor = vec4(tc, 1.0);\n"
"}";
// vignette
const char vignetteFShaderStr[] =
"#version 300 es\n"
"precision highp float;\n"
"uniform lowp sampler2D s_TextureMap;\n"
"in vec2 v_texCoord;\n"
"layout(location = 0) out vec4 outColor;\n"
"\n"
"uniform lowp vec2 vignetteCenter;\n"
"uniform lowp vec3 vignetteColor;\n"
"uniform highp float vignetteStart;\n"
"uniform highp float vignetteEnd;\n"
"\n"
"void main()\n"
"{\n"
"lowp vec3 rgb = texture(s_TextureMap, v_texCoord).rgb;\n"
"lowp float d = distance(v_texCoord, vec2(0.5,0.5));\n"
"rgb *= (1.0 - smoothstep(vignetteStart , vignetteEnd, d));\n"
"outColor = vec4(vec3(rgb), 1.0);\n"
"\n"
"lowp vec3 rgb2 = texture(s_TextureMap, v_texCoord).rgb;\n"
"lowp float d2 = distance(v_texCoord, vec2(vignetteCenter.x, vignetteCenter.y));\n"
"lowp float percent = smoothstep(vignetteStart, vignetteEnd, d2);\n"
"outColor = vec4(mix(rgb2.x,vignetteColor.x,percent), mix(rgb2.y, vignetteColor.y, percent), mix(rgb2.z, vignetteColor.z, percent), 1.0);\n"
"}";
// ColorMatrix
const char colorMatrixFShaderStr[] =
"#version 300 es\n"
"precision highp float;\n"
"in vec2 v_texCoord;\n"
"uniform lowp sampler2D s_TextureMap;\n"
"layout(location = 0) out vec4 outColor;\n"
"\n"
"uniform lowp mat4 colorMatrix;\n"
"uniform lowp float intensity;\n"
"void main()\n"
"{\n"
"lowp vec4 textureColor = texture(s_TextureMap, v_texCoord);\n"
"lowp vec4 outputColor = textureColor * colorMatrix;\n"
"outColor = (intensity * outputColor) + ((1.0 - intensity) * textureColor);\n"
"}";
// toon
const char toonFShaderStr[] =
"#version 300 es\n"
"precision highp float;\n"
"in vec2 v_texCoord;\n"
"layout(location = 0) out vec4 outColor;\n"
"in vec2 leftTextureCoordinate;\n"
"in vec2 rightTextureCoordinate;\n"
"in vec2 topTextureCoordinate;\n"
"in vec2 topLeftTextureCoordinate;\n"
"in vec2 topRightTextureCoordinate;\n"
"in vec2 bottomTextureCoordinate;\n"
"in vec2 bottomLeftTextureCoordinate;\n"
"in vec2 bottomRightTextureCoordinate;\n"
"uniform lowp sampler2D s_TextureMap;\n"
"uniform highp float intensity;\n"
"uniform highp float threshold;\n"
"uniform highp float quantizationLevels;\n"
"const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);\n"
"void main()\n"
"{\n"
"vec4 textureColor = texture(s_TextureMap, v_texCoord);\n"
"float bottomLeftIntensity = texture(s_TextureMap, bottomLeftTextureCoordinate).r;\n"
"float topRightIntensity = texture(s_TextureMap, topRightTextureCoordinate).r;\n"
"float topLeftIntensity = texture(s_TextureMap, topLeftTextureCoordinate).r;\n"
"float bottomRightIntensity = texture(s_TextureMap, bottomRightTextureCoordinate).r;\n"
"float leftIntensity = texture(s_TextureMap, leftTextureCoordinate).r;\n"
"float rightIntensity = texture(s_TextureMap, rightTextureCoordinate).r;\n"
"float bottomIntensity = texture(s_TextureMap, bottomTextureCoordinate).r;\n"
"float topIntensity = texture(s_TextureMap, topTextureCoordinate).r;\n"
"float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity +2.0 * bottomIntensity + bottomRightIntensity;\n"
"float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity +2.0 * rightIntensity + topRightIntensity;\n"
"float mag = length(vec2(h, v));\n"
"\n"
"vec3 posterizedImageColor = floor((textureColor.rgb * quantizationLevels) + 0.5) / quantizationLevels;\n"
"\n"
"float thresholdTest = 1.0 - step(threshold, mag);\n"
"\n"
"outColor = vec4(posterizedImageColor * thresholdTest, textureColor.a);\n"
"}";
// grayScale
const char grayScaleShaderStr[] =
"#version 300 es\n"
"precision highp float;\n"
"in vec2 v_texCoord;\n"
"uniform lowp sampler2D s_TextureMap;\n"
"layout(location = 0) out vec4 outColor;\n"
"const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);\n"
"void main()\n"
"{\n"
"lowp vec4 textureColor = texture(s_TextureMap, v_texCoord);\n"
"float luminance = dot(textureColor.rgb, W);\n"
"outColor = vec4(vec3(luminance), textureColor.a);\n"
"}";
// sketch
const char sketchShaderStr[] =
"#version 300 es\n"
"precision highp float;\n"
"layout(location = 0) out vec4 outColor;\n"
"uniform lowp sampler2D s_TextureMap;\n"
"in vec2 textureCoordinate;\n"
"in vec2 leftTextureCoordinate;\n"
"in vec2 rightTextureCoordinate;\n"
"\n"
"in vec2 topTextureCoordinate;\n"
"in vec2 topLeftTextureCoordinate;\n"
"in vec2 topRightTextureCoordinate;\n"
"\n"
"in vec2 bottomTextureCoordinate;\n"
"in vec2 bottomLeftTextureCoordinate;\n"
"in vec2 bottomRightTextureCoordinate;\n"
"\n"
"void main()\n"
"{\n"
"float bottomLeftIntensity = texture(s_TextureMap, bottomLeftTextureCoordinate).r;\n"
"float topRightIntensity = texture(s_TextureMap, topRightTextureCoordinate).r;\n"
"float topLeftIntensity = texture(s_TextureMap, topLeftTextureCoordinate).r;\n"
"float bottomRightIntensity = texture(s_TextureMap, bottomRightTextureCoordinate).r;\n"
"float leftIntensity = texture(s_TextureMap, leftTextureCoordinate).r;\n"
"float rightIntensity = texture(s_TextureMap, rightTextureCoordinate).r;\n"
"float bottomIntensity = texture(s_TextureMap, bottomTextureCoordinate).r;\n"
"float topIntensity = texture(s_TextureMap, topTextureCoordinate).r;\n"
"float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity +2.0 * bottomIntensity + bottomRightIntensity;\n"
"float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity +2.0 * rightIntensity + topRightIntensity;\n"
"float mag = 1.0-length(vec2(h, v));\n"
"outColor = vec4(vec3(mag), 1.0);\n"
"}";
// blur
const char blurShaderStr[] =
"#version 300 es\n"
"precision highp float;\n"
"uniform lowp sampler2D s_TextureMap;\n"
"in vec2 v_texCoord;\n"
"layout(location = 0) out vec4 outColor;\n"
"uniform highp int blurRadius;\n"
"uniform highp vec2 blurOffset;\n"
"\n"
"uniform highp float sumWeight;\n"
"float PI = 3.1415926;\n"
"float getWeight(int i)\n"
"{\n"
"float sigma = float(blurRadius) / 3.0 ;\n"
"return (1.0 / sqrt(2.0 * PI * sigma * sigma)) * exp(-float(i * i) / (2.0 * sigma * sigma)) / sumWeight;\n"
"}\n"
"vec2 clampCoordinate (vec2 coordinate)\n"
"{\n"
"return vec2(clamp(coordinate.x, 0.0, 1.0), clamp(coordinate.y, 0.0, 1.0));\n"
"}\n"
"\n"
"void main()\n"
"{\n"
"vec4 sourceColor = texture(s_TextureMap, v_texCoord);\n"
"if (blurRadius <= 1)\n"
"{\n"
"outColor = sourceColor;\n"
"return;\n"
"}\n"
"float weight = getWeight(0);\n"
"vec3 finalColor = sourceColor.rgb * weight;\n"
"for(int i = 1; i < blurRadius; i++) {\n"
"weight = getWeight(i);\n"
"finalColor += texture(s_TextureMap, clampCoordinate(v_texCoord - blurOffset * float(i))).rgb * weight;\n"
"finalColor += texture(s_TextureMap, clampCoordinate(v_texCoord + blurOffset * float(i))).rgb * weight;\n"
"}\n"
"outColor = vec4(finalColor, sourceColor.a);\n"
"}\n";
#endif // GPU_ImageETS_constant_shape_H


@ -0,0 +1,71 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
// Created on 2022/12/20.
//
// Node APIs are not fully supported. To solve the compilation error of the interface cannot be found,
#include <js_native_api.h>
#include <js_native_api_types.h>
#include <node_api.h>
#include "DebugLog.h"
#include "native_common.h"
#include "EGLRender.h"
static napi_value Init(napi_env env, napi_value exports)
{
napi_property_descriptor desc[] = {
DECLARE_NAPI_FUNCTION("EglRenderInit", EGLRender::RenderInit),
DECLARE_NAPI_FUNCTION("EglRenderSetImageData", EGLRender::RenderSetData),
DECLARE_NAPI_FUNCTION("EglPixelMapSurface", EGLRender::GetPixelMapOfSurface),
DECLARE_NAPI_FUNCTION("EglRenderSetIntParams", EGLRender::RenderSetIntParams),
DECLARE_NAPI_FUNCTION("EglIsInit", EGLRender::EGLIsInit),
DECLARE_NAPI_FUNCTION("EglDestroy", EGLRender::DestroyGlesEnv),
DECLARE_NAPI_FUNCTION("EglUseProgram", EGLRender::StartUseProgram),
DECLARE_NAPI_FUNCTION("EglRendering", EGLRender::Rendering),
DECLARE_NAPI_FUNCTION("EglUniform1i", EGLRender::RenderGlUniform1i),
DECLARE_NAPI_FUNCTION("EglUniform1f", EGLRender::RenderGlUniform1f),
DECLARE_NAPI_FUNCTION("EglUniform2fv", EGLRender::RenderGlUniform2fv),
DECLARE_NAPI_FUNCTION("EglSetTypeArrayOfFloat", EGLRender::setTypeArrayOfFloat),
DECLARE_NAPI_FUNCTION("EglSetTypeArrayOfMatrix3f", EGLRender::setTypeArrayOfMatrix3f),
DECLARE_NAPI_FUNCTION("EglSetTypeArrayOfMatrix4f", EGLRender::setTypeArrayOfMatrix4f),
};
NAPI_CALL(env, napi_define_properties(env, exports, sizeof(desc) / sizeof(desc[0]), desc));
return exports;
}
/**
* Napi Module define
*/
static napi_module nativeGpuModule = {
.nm_version =1,
.nm_flags = 0,
.nm_filename = nullptr,
.nm_register_func = Init,
.nm_modname = "nativeGpu",
.nm_priv = ((void*)0),
.reserved = { 0 },
};
extern "C" __attribute__((constructor)) void RegisterModule(void)
{
napi_module_register(&nativeGpuModule);
}


@ -0,0 +1,801 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
// Created on 2022/12/20.
//
// Node APIs are not fully supported. To resolve the compilation error that an interface cannot be found, include the required Node-API headers.
#include "EGLRender.h"
#include <js_native_api.h>
#include <mutex>
#include <napi/native_api.h>
#include <stdlib.h>
#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GLES3/gl3.h>
#include <stdint.h>
#include <js_native_api_types.h>
#include "native_common.h"
#include "DebugLog.h"
#include "constant/constant_shape.h"
#include "GLUtils.h"
#include "../util/NapiUtil.h"
const int PARAM_TYPE_SHADER_INDEX = 300;
const int VERTEX_POS_LOC = 0;
const int TEXTURE_POS_LOC = 1;
const int32_t STR_DEFAULT_SIZE = 1024;
EGLRender *EGLRender::sInstance = nullptr;
// Vertex coordinates
const static GLfloat vVertices[] = {
-1.0f, -1.0f, 0.0f, // bottom left
1.0f, -1.0f, 0.0f, // bottom right
-1.0f, 1.0f, 0.0f, // top left
1.0f, 1.0f, 0.0f, // top right
};
// Normal texture coordinates
const static GLfloat vTexCoors[] = {
0.0f, 1.0f, // bottom left
1.0f, 1.0f, // bottom right
0.0f, 0.0f, // top left
1.0f, 0.0f, // top right
};
// FBO texture coordinates differ from normal texture coordinates in orientation (vertically mirrored)
const static GLfloat vFboTexCoors[] = {
0.0f, 0.0f, // bottom left
1.0f, 0.0f, // bottom right
0.0f, 1.0f, // top left
1.0f, 1.0f, // top right
};
const static GLushort indices[] = { 0, 1, 2, 1, 3, 2 };
std::mutex mtx;
EGLRender* EGLRender::GetInstance()
{
mtx.lock();
if (sInstance == nullptr) {
sInstance = new EGLRender();
}
mtx.unlock();
return sInstance;
}
napi_value EGLRender::RenderInit(napi_env env, napi_callback_info info)
{
napi_value exports;
NAPI_CALL(env, napi_create_object(env, &exports));
napi_property_descriptor desc[] = {};
NAPI_CALL(env, napi_define_properties(env, exports, sizeof(desc) / sizeof(desc[0]), desc));
EGLRender::GetInstance() ->Init();
return exports;
}
napi_value EGLRender::RenderSetData(napi_env env, napi_callback_info info)
{
size_t argc = 3;
napi_value args[3] = { nullptr };
napi_get_cb_info(env, info, &argc, args, nullptr, nullptr);
void* buffer;
size_t bufferLength;
napi_status buffStatus = napi_get_arraybuffer_info(env, args[0], &buffer, &bufferLength);
if (buffStatus != napi_ok) {
return nullptr;
}
uint8_t* uint8_buf = reinterpret_cast<uint8_t *>(buffer);
uint32_t width;
napi_status wStatus = napi_get_value_uint32(env, args[1], &width);
if (wStatus != napi_ok) {
return nullptr;
}
uint32_t height;
napi_status hStatus = napi_get_value_uint32(env, args[2], &height);
if (hStatus != napi_ok) {
return nullptr;
}
EGLRender::GetInstance() -> SetImageData(uint8_buf, width, height);
return nullptr;
}
napi_value EGLRender::RenderSetIntParams(napi_env env, napi_callback_info info)
{
LOGI("gl--> RenderSetIntParams start");
size_t argc = 2;
napi_value args[2] = { nullptr };
napi_get_cb_info(env, info, &argc, args, nullptr, nullptr);
uint32_t type;
napi_status tStatus = napi_get_value_uint32(env, args[0], &type);
if (tStatus != napi_ok) {
return nullptr;
}
uint32_t param;
napi_status pStatus = napi_get_value_uint32(env, args[1], &param);
if (pStatus != napi_ok) {
return nullptr;
}
EGLRender::GetInstance() -> SetIntParams(type, param);
return nullptr;
}
napi_value EGLRender::GetPixelMapOfSurface(napi_env env, napi_callback_info info)
{
size_t argc = 4;
napi_value args[4] = { nullptr };
napi_get_cb_info(env, info, &argc, args, nullptr, nullptr);
uint32_t x;
napi_status xStatus = napi_get_value_uint32(env, args[0], &x);
if (xStatus != napi_ok) {
return nullptr;
}
uint32_t y;
napi_status yStatus = napi_get_value_uint32(env, args[1], &y);
if (yStatus != napi_ok) {
return nullptr;
}
uint32_t surfaceWidth;
napi_status swStatus = napi_get_value_uint32(env, args[2], &surfaceWidth);
if (swStatus != napi_ok) {
return nullptr;
}
uint32_t surfaceHeight;
napi_status shStatus = napi_get_value_uint32(env, args[3], &surfaceHeight);
if (shStatus != napi_ok) {
return nullptr;
}
uint8_t* pixels = (uint8_t*) malloc(surfaceWidth * surfaceHeight * DEFAULT_FOUR);
glPixelStorei(GL_PACK_ALIGNMENT, 1);
glReadPixels(x, y, surfaceWidth, surfaceHeight, GL_RGBA, GL_UNSIGNED_BYTE, pixels);
NativeImageUtil::Flip(&pixels, surfaceWidth, surfaceHeight);
napi_value array;
int byte_length = surfaceWidth * surfaceHeight * DEFAULT_FOUR;
if (!NativeImageUtil::CreateArrayBuffer(env, pixels, byte_length, &array)) {
LOGI("gl--> GetPixelMapOfSurface error");
}
free(pixels);
return array;
}
napi_value EGLRender::EGLIsInit(napi_env env, napi_callback_info info)
{
napi_value isInit;
int32_t value;
if (EGLRender::GetInstance() -> m_IsGLContextReady) {
value = 1;
} else {
value = 0;
}
napi_status status = napi_create_int32(env, value, &isInit);
if (status != napi_ok) {
return nullptr;
}
return isInit;
}
napi_value EGLRender::DestroyGlesEnv(napi_env env, napi_callback_info info)
{
EGLRender::GetInstance() -> UnInit();
return nullptr;
}
napi_value EGLRender::StartUseProgram(napi_env env, napi_callback_info info)
{
EGLRender::GetInstance() -> UseProgram();
return nullptr;
}
napi_value EGLRender::Rendering(napi_env env, napi_callback_info info)
{
// Render
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, (const void *)0);
glBindVertexArray(GL_NONE);
glBindTexture(GL_TEXTURE_2D, GL_NONE);
return nullptr;
}
napi_value EGLRender::setTypeArrayOfFloat(napi_env env, napi_callback_info info)
{
size_t argc = 3;
napi_value args[3] = { nullptr };
napi_get_cb_info(env, info, &argc, args, nullptr, nullptr);
std::string locationContent;
NapiUtil::JsValueToString(env, args[0], STR_DEFAULT_SIZE, locationContent);
char* location = (char*)locationContent.c_str();
std::string content;
NapiUtil::JsValueToString(env, args[1], STR_DEFAULT_SIZE, content);
char* key = (char*)content.c_str();
napi_typedarray_type dataType = napi_float32_array;
void* buffer;
size_t bufferLength;
size_t byte_offset;
napi_status buffStatus = napi_get_typedarray_info(env,
args[2], &dataType, &bufferLength, &buffer, &args[2], &byte_offset);
if (buffStatus != napi_ok) {
return nullptr;
}
float* value = reinterpret_cast<float *>(buffer);
int uniformType;
if (strcmp(key, "glUniform2fv") == 0) {
uniformType = UNIFORM_TYPE_2FV;
} else if (strcmp(key, "glUniform3fv") == 0) {
uniformType = UNIFORM_TYPE_3FV;
} else if (strcmp(key, "glUniform4fv") == 0) {
uniformType = UNIFORM_TYPE_4FV;
} else if (strcmp(key, "glUniform1fv") == 0) {
uniformType = UNIFORM_TYPE_FV;
} else if (strcmp(key, "glUniform2f") == 0) {
uniformType = UNIFORM_TYPE_2F;
}
EGLRender::GetInstance() -> GlUniformArray(location, value, uniformType);
return nullptr;
}
napi_value EGLRender::setTypeArrayOfMatrix3f(napi_env env, napi_callback_info info)
{
size_t argc = 2;
napi_value args[2] = { nullptr };
napi_get_cb_info(env, info, &argc, args, nullptr, nullptr);
std::string locationContent;
NapiUtil::JsValueToString(env, args[0], STR_DEFAULT_SIZE, locationContent);
char* location = (char*)locationContent.c_str();
napi_typedarray_type dataType = napi_float32_array;
void* buffer;
size_t bufferLength;
size_t byte_offset;
napi_status buffStatus = napi_get_typedarray_info(env,
args[1], &dataType, &bufferLength, &buffer, &args[1], &byte_offset);
if (buffStatus != napi_ok) {
return nullptr;
}
float* value = reinterpret_cast<float *>(buffer);
EGLRender::GetInstance() -> GlUniformMatrix(location, value, UNIFORM_TYPE_THREE);
return nullptr;
}
napi_value EGLRender::setTypeArrayOfMatrix4f(napi_env env, napi_callback_info info)
{
size_t argc = 2;
napi_value args[2] = { nullptr };
napi_get_cb_info(env, info, &argc, args, nullptr, nullptr);
std::string locationContent;
NapiUtil::JsValueToString(env, args[0], STR_DEFAULT_SIZE, locationContent);
char* location = (char*)locationContent.c_str();
napi_typedarray_type dataType = napi_float32_array;
void* buffer;
size_t bufferLength;
size_t byte_offset;
napi_status buffStatus = napi_get_typedarray_info(env,
args[1], &dataType, &bufferLength, &buffer, &args[1], &byte_offset);
if (buffStatus != napi_ok) {
return nullptr;
}
float* value = reinterpret_cast<float *>(buffer);
EGLRender::GetInstance() -> GlUniformMatrix(location, value, UNIFORM_TYPE_FOUR);
return nullptr;
}
napi_value EGLRender::RenderGlUniform1i(napi_env env, napi_callback_info info)
{
// int
size_t argc = 2;
napi_value args[2] = { nullptr };
napi_get_cb_info(env, info, &argc, args, nullptr, nullptr);
std::string content;
NapiUtil::JsValueToString(env, args[0], STR_DEFAULT_SIZE, content);
uint32_t value;
napi_status status = napi_get_value_uint32(env, args[1], &value);
if (status != napi_ok) {
return nullptr;
}
EGLRender::GetInstance() -> GlUniform((char*)content.c_str(), value, UNIFORM_TYPE_ZERO);
return nullptr;
}
napi_value EGLRender::RenderGlUniform1f(napi_env env, napi_callback_info info)
{
// float
size_t argc = 2;
napi_value args[2] = { nullptr };
napi_get_cb_info(env, info, &argc, args, nullptr, nullptr);
std::string content;
NapiUtil::JsValueToString(env, args[0], STR_DEFAULT_SIZE, content);
double value;
napi_status status = napi_get_value_double(env, args[1], &value);
if (status != napi_ok) {
return nullptr;
}
EGLRender::GetInstance() -> GlUniform((char*)content.c_str(), value, UNIFORM_TYPE_ONE);
return nullptr;
}
napi_value EGLRender::RenderGlUniform2fv(napi_env env, napi_callback_info info)
{
// float array
size_t argc = 3;
napi_value args[3] = { nullptr };
napi_get_cb_info(env, info, &argc, args, nullptr, nullptr);
std::string content;
NapiUtil::JsValueToString(env, args[0], STR_DEFAULT_SIZE, content);
double value;
napi_status status = napi_get_value_double(env, args[1], &value);
if (status != napi_ok) {
return nullptr;
}
double value2;
napi_status status2 = napi_get_value_double(env, args[2], &value2);
if (status2 != napi_ok) {
return nullptr;
}
float vce2[2];
vce2[0] = value;
vce2[1] = value2;
EGLRender::GetInstance() -> GlUniformArray((char*)content.c_str(), vce2, UNIFORM_TYPE_2FV_SIZE);
return nullptr;
}
EGLRender::EGLRender()
{
m_ImageTextureId = GL_NONE;
m_FboTextureId = GL_NONE;
m_SamplerLoc = GL_NONE;
m_TexSizeLoc = GL_NONE;
m_FboId = GL_NONE;
m_ProgramObj = GL_NONE;
m_VertexShader = GL_NONE;
m_FragmentShader = GL_NONE;
m_eglDisplay = nullptr;
m_IsGLContextReady = false;
m_ShaderIndex = 0;
}
EGLRender::~EGLRender()
{
}
void EGLRender::TexturesInit()
{
glGenTextures(1, &m_ImageTextureId); // 生成纹理名称
glBindTexture(GL_TEXTURE_2D, m_ImageTextureId); // 允许建立一个绑定到目标纹理的有名称的纹理
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glBindTexture(GL_TEXTURE_2D, GL_NONE);
glGenTextures(1, &m_FboTextureId);
glBindTexture(GL_TEXTURE_2D, m_FboTextureId);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glBindTexture(GL_TEXTURE_2D, GL_NONE);
}
void EGLRender::Init()
{
if (CreateGlEnv() == 0) {
m_IsGLContextReady = true;
}
if (!m_IsGLContextReady) {
return;
}
EGLRender::GetInstance() -> TexturesInit();
m_ProgramObj = GLUtils::CreateProgram(vShaderStr, fShaderStr0, m_VertexShader,
m_FragmentShader);
if (!m_ProgramObj) {
GLUtils::CheckGLError("Create Program");
return;
}
glGenBuffers(DEFAULT_THREE, m_VboIds);
glBindBuffer(GL_ARRAY_BUFFER, m_VboIds[DEFAULT_ZERO]);
glBufferData(GL_ARRAY_BUFFER, sizeof(vVertices), vVertices, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, m_VboIds[DEFAULT_ONE]);
glBufferData(GL_ARRAY_BUFFER, sizeof(vTexCoors), vTexCoors, GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_VboIds[DEFAULT_TWO]);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices, GL_STATIC_DRAW);
glGenVertexArrays(DEFAULT_ONE, m_VaoIds);
glBindVertexArray(m_VaoIds[DEFAULT_ZERO]);
glBindBuffer(GL_ARRAY_BUFFER, m_VboIds[DEFAULT_ZERO]);
glEnableVertexAttribArray(VERTEX_POS_LOC);
glVertexAttribPointer(VERTEX_POS_LOC,
DEFAULT_THREE,
GL_FLOAT, GL_FALSE,
DEFAULT_THREE * sizeof(GLfloat),
(const void *)DEFAULT_ZERO);
glBindBuffer(GL_ARRAY_BUFFER, GL_NONE);
glBindBuffer(GL_ARRAY_BUFFER, m_VboIds[DEFAULT_ONE]);
glEnableVertexAttribArray(TEXTURE_POS_LOC);
glVertexAttribPointer(TEXTURE_POS_LOC,
DEFAULT_TWO,
GL_FLOAT,
GL_FALSE,
DEFAULT_TWO * sizeof(GLfloat),
(const void *)DEFAULT_ZERO);
glBindBuffer(GL_ARRAY_BUFFER, GL_NONE);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_VboIds[DEFAULT_TWO]);
glBindVertexArray(GL_NONE);
}
int EGLRender::CreateGlEnv()
{
const EGLint confAttr[] = {
EGL_RENDERABLE_TYPE, EGL_OPENGL_ES3_BIT_KHR, EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
EGL_RED_SIZE, 8, EGL_GREEN_SIZE, 8, EGL_BLUE_SIZE, 8, EGL_ALPHA_SIZE, 8,
EGL_DEPTH_SIZE, 16, EGL_STENCIL_SIZE, 8, EGL_NONE
};
const EGLint ctxAttr[] = {
EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE
};
const EGLint surfaceAttr[] = {
EGL_WIDTH, 1, EGL_HEIGHT, 1, EGL_NONE
};
EGLint eglMajVers, eglMinVers;
EGLint numConfigs;
int resultCode = 0;
do {
m_eglDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
if (m_eglDisplay == EGL_NO_DISPLAY) {
resultCode = -1;
break;
}
// Initialize EGL
if (!eglInitialize(m_eglDisplay, &eglMajVers, &eglMinVers)) {
resultCode = -1;
break;
}
// Get the EGLConfig object that describes the rendering surface configuration
if (!eglChooseConfig(m_eglDisplay, confAttr, &m_eglConf, 1, &numConfigs)) {
resultCode = -1;
break;
}
// Create the EGLSurface: use eglCreatePbufferSurface to create an off-screen rendering area
m_eglSurface = eglCreatePbufferSurface(m_eglDisplay, m_eglConf, surfaceAttr);
if (m_eglSurface == EGL_NO_SURFACE) {
LOGI("gl-->::CreateGlesEnv happen default error");
break;
}
// Create the EGLContext rendering context
m_eglCtx = eglCreateContext(m_eglDisplay, m_eglConf, EGL_NO_CONTEXT, ctxAttr);
if (m_eglCtx == EGL_NO_CONTEXT) {
EGLint error = eglGetError();
if (error == EGL_BAD_CONFIG) {
resultCode = -1;
break;
}
}
// Make the context current
if (!eglMakeCurrent(m_eglDisplay, m_eglSurface, m_eglSurface, m_eglCtx)) {
resultCode = -1;
break;
}
} while (false);
return resultCode;
}
void EGLRender::SetImageData(uint8_t *pData, int width, int height)
{
if (pData && m_IsGLContextReady) {
if (m_RenderImage.ppPlane[0]) {
NativeImageUtil::FreeNativeImage(&m_RenderImage);
m_RenderImage.ppPlane[0] = nullptr;
}
m_RenderImage.width = width;
m_RenderImage.height = height;
m_RenderImage.format = IMAGE_FORMAT_RGBA;
NativeImageUtil::AllocNativeImage(&m_RenderImage);
memcpy(m_RenderImage.ppPlane[0], pData, width * height * DEFAULT_FOUR);
glBindTexture(GL_TEXTURE_2D, m_ImageTextureId);
glTexImage2D(GL_TEXTURE_2D,
0,
GL_RGBA,
m_RenderImage.width,
m_RenderImage.height,
0,
GL_RGBA,
GL_UNSIGNED_BYTE,
m_RenderImage.ppPlane[0]);
glBindTexture(GL_TEXTURE_2D, GL_NONE);
if (m_FboId == GL_NONE) {
// Create FBO
glGenFramebuffers(1, &m_FboId);
glBindFramebuffer(GL_FRAMEBUFFER, m_FboId);
glBindTexture(GL_TEXTURE_2D, m_FboTextureId);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, m_FboTextureId, 0);
glTexImage2D(GL_TEXTURE_2D,
0,
GL_RGBA,
m_RenderImage.width,
m_RenderImage.height,
0,
GL_RGBA,
GL_UNSIGNED_BYTE,
nullptr);
if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
LOGI("gl--> EGLRender ::SetImageData glCheckFramebufferStatus status != GL_FRAMEBUFFER_COMPLETE");
}
glBindTexture(GL_TEXTURE_2D, GL_NONE);
glBindFramebuffer(GL_FRAMEBUFFER, GL_NONE);
}
LOGI("gl--> :: SetImageData end");
}
}
void EGLRender::SetIntParams(int paramType, int param)
{
LOGI("gl--> EGLRender::SetIntParams paramType = %{public}d,param = %{public}d", paramType, param);
switch (paramType) {
case PARAM_TYPE_SHADER_INDEX: {
if (param >= 0) {
if (m_ProgramObj) {
glDeleteProgram(m_ProgramObj);
m_ProgramObj = GL_NONE;
}
const char* vShader[1];
vShader[0] = vShaderStr;
const char* fShader[1];
switch (param) {
case SHADER_TYPE_KUWAHARA: {
fShader[0] = fShaderStr3;
break;
}
case SHADER_TYPE_SWIRL: {
fShader[0] = swirlFShaderStr;
break;
}
case SHADER_TYPE_BRIGHT: {
fShader[0] = brightFShaderStr;
break;
}
case SHADER_TYPE_CONTRAST: {
fShader[0] = contrastFShaderStr;
break;
}
case SHADER_TYPE_INVERT: {
fShader[0] = invertFShaderStr;
break;
}
case SHADER_TYPE_PIXELATION: {
fShader[0] = pixelFShaderStr;
break;
}
case SHADER_TYPE_SEPIA: {
fShader[0] = colorMatrixFShaderStr;
break;
}
case SHADER_TYPE_SKETCH: {
fShader[0] = sketchShaderStr;
vShader[0] = v3x3ShaderStr;
break;
}
case SHADER_TYPE_TOON: {
fShader[0] = toonFShaderStr;
vShader[0] = v3x3ShaderStr;
break;
}
case SHADER_TYPE_VIGNETTE: {
fShader[0] = vignetteFShaderStr;
break;
}
case SHADER_TYPE_GRAYSCALE: {
fShader[0] = grayScaleShaderStr;
break;
}
case SHADER_TYPE_BLUR: {
fShader[0] = blurShaderStr;
break;
}
default:
// fall back to the default fragment shader so fShader[0] is always initialized
fShader[0] = fShaderStr0;
break;
}
m_ProgramObj = GLUtils::CreateProgram(vShader[0], fShader[0], m_VertexShader,
m_FragmentShader);
if (!m_ProgramObj) {
GLUtils::CheckGLError("Create Program");
LOGI("gl--> EGLRender::SetIntParams Could not create program.");
return;
}
m_SamplerLoc = glGetUniformLocation(m_ProgramObj, "s_TextureMap");
m_TexSizeLoc = glGetUniformLocation(m_ProgramObj, "u_texSize");
}
}
break;
default:
break;
}
}
void EGLRender::UseProgram()
{
if (m_ProgramObj == GL_NONE) {
return;
}
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
glClear(GL_STENCIL_BUFFER_BIT | GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glViewport(0, 0, m_RenderImage.width, m_RenderImage.height);
// DO FBO off screen rendering
glUseProgram(m_ProgramObj);
glBindFramebuffer(GL_FRAMEBUFFER, m_FboId);
glBindVertexArray(m_VaoIds[0]);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, m_ImageTextureId);
glUniform1i(m_SamplerLoc, 0);
if (m_TexSizeLoc > -1) {
GLfloat size[2];
size[0] = m_RenderImage.width;
size[1] = m_RenderImage.height;
glUniform2f(m_TexSizeLoc, size[0], size[1]);
}
}
void EGLRender::GlUniform(char* location, float value, int unType)
{
GLint ll = glGetUniformLocation(m_ProgramObj, location);
switch (unType) {
case UNIFORM_TYPE_ZERO:
glUniform1i(ll, (int)value);
break;
case UNIFORM_TYPE_ONE:
glUniform1f(ll, value);
break;
default:
break;
}
}
void EGLRender::GlUniformArray(char* location, float* value, int unType)
{
GLint ll = glGetUniformLocation(m_ProgramObj, location);
switch (unType) {
case UNIFORM_TYPE_2FV_SIZE:
GLfloat vec2[2];
vec2[0] = value[0] * m_RenderImage.width;
vec2[1] = value[1] * m_RenderImage.height;
glUniform2fv(ll, 1, vec2);
break;
case UNIFORM_TYPE_2F:
glUniform2f(ll, value[0], value[1]);
break;
case UNIFORM_TYPE_FV:
glUniform1fv(ll, 1, value);
break;
case UNIFORM_TYPE_2FV:
glUniform2fv(ll, 1, value);
break;
case UNIFORM_TYPE_3FV:
glUniform3fv(ll, 1, value);
break;
case UNIFORM_TYPE_4FV:
glUniform4fv(ll, 1, value);
break;
default:
break;
}
}
void EGLRender::GlUniformMatrix(char* location, float* value, int unType)
{
GLint ll = glGetUniformLocation(m_ProgramObj, location);
switch (unType) {
case UNIFORM_TYPE_THREE:
glUniformMatrix3fv(ll, 1, false, value);
break;
case UNIFORM_TYPE_FOUR:
glUniformMatrix4fv(ll, 1, false, value);
break;
default:
break;
}
}
void EGLRender::UnInit()
{
if (m_ProgramObj) {
glDeleteProgram(m_ProgramObj);
m_ProgramObj = GL_NONE;
}
if (m_ImageTextureId) {
glDeleteTextures(DEFAULT_ONE, &m_ImageTextureId);
m_ImageTextureId = GL_NONE;
}
if (m_FboTextureId) {
glDeleteTextures(DEFAULT_ONE, &m_FboTextureId);
m_FboTextureId = GL_NONE;
}
if (m_VboIds[DEFAULT_ZERO]) {
glDeleteBuffers(DEFAULT_THREE, m_VboIds);
m_VboIds[DEFAULT_ZERO] = GL_NONE;
m_VboIds[DEFAULT_ONE] = GL_NONE;
m_VboIds[DEFAULT_TWO] = GL_NONE;
}
if (m_VaoIds[DEFAULT_ZERO]) {
glDeleteVertexArrays(DEFAULT_ONE, m_VaoIds);
m_VaoIds[DEFAULT_ZERO] = GL_NONE;
}
if (m_FboId) {
glDeleteFramebuffers(1, &m_FboId);
m_FboId = GL_NONE;
}
if (m_IsGLContextReady) {
DestroyGl();
m_IsGLContextReady = false;
}
}
void EGLRender::DestroyGl()
{
// Release the EGL environment
if (m_eglDisplay != EGL_NO_DISPLAY) {
eglMakeCurrent(m_eglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
eglDestroyContext(m_eglDisplay, m_eglCtx);
eglDestroySurface(m_eglDisplay, m_eglSurface);
eglReleaseThread();
eglTerminate(m_eglDisplay);
}
m_eglDisplay = EGL_NO_DISPLAY;
m_eglSurface = EGL_NO_SURFACE;
m_eglCtx = EGL_NO_CONTEXT;
}

View File

@ -0,0 +1,109 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
// Created on 2022/12/20.
//
// Node APIs are not fully supported. To resolve the compilation error that an interface cannot be found, include the required Node-API headers.
#ifndef GPU_ImageETS_EGLRender_H
#define GPU_ImageETS_EGLRender_H
#include <EGL/egl.h>
#include <GLES3/gl3.h>
#include <js_native_api.h>
#include <napi/native_api.h>
#include <stdint.h>
#include "NativeImage.h"
const int EGL_FEATURE_NUM = 7;
class EGLRender {
private:
EGLRender();
~EGLRender();
public:
static void DestroyRender()
{
if (sInstance) {
delete sInstance;
sInstance = nullptr;
}
}
static EGLRender* GetInstance();
static napi_value RenderInit(napi_env env, napi_callback_info info);
static napi_value RenderSetData(napi_env env, napi_callback_info info);
static napi_value GetPixelMapOfSurface(napi_env env, napi_callback_info info);
static napi_value RenderSetIntParams(napi_env env, napi_callback_info info);
static napi_value EGLIsInit(napi_env env, napi_callback_info info);
static napi_value DestroyGlesEnv(napi_env env, napi_callback_info info);
static napi_value StartUseProgram(napi_env env, napi_callback_info info);
static napi_value Rendering(napi_env env, napi_callback_info info);
static napi_value RenderGlUniform1i(napi_env env, napi_callback_info info);
static napi_value RenderGlUniform1f(napi_env env, napi_callback_info info);
static napi_value RenderGlUniform2fv(napi_env env, napi_callback_info info);
static napi_value setTypeArrayOfFloat(napi_env env, napi_callback_info info);
static napi_value setTypeArrayOfMatrix3f(napi_env env, napi_callback_info info);
static napi_value setTypeArrayOfMatrix4f(napi_env env, napi_callback_info info);
void Init();
void TexturesInit();
int CreateGlEnv();
void SetImageData(uint8_t *pData, int width, int height);
void SetIntParams(int paramType, int param);
void UseProgram();
void Draw();
void GlUniform(char* location, float value, int unType);
void GlUniformArray(char* location, float* value, int unType);
void GlUniformMatrix(char* location, float* value, int unType);
void DestroyGl();
void UnInit();
private:
static EGLRender* sInstance;
GLuint m_ImageTextureId;
GLuint m_FboTextureId;
GLuint m_FboId;
GLuint m_VaoIds[1] = {GL_NONE};
GLuint m_VboIds[3] = {GL_NONE};
GLint m_SamplerLoc;
GLint m_TexSizeLoc;
NativeImage m_RenderImage;
GLuint m_ProgramObj;
GLuint m_VertexShader;
GLuint m_FragmentShader;
EGLConfig m_eglConf;
EGLSurface m_eglSurface;
EGLContext m_eglCtx;
EGLDisplay m_eglDisplay;
bool m_IsGLContextReady;
const char* m_fShaderStrs[EGL_FEATURE_NUM];
int m_ShaderIndex;
};
#endif // GPU_ImageETS_EGLRender_H

View File

@ -0,0 +1,29 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export const EglRenderInit: () => void;
export const EglRenderSetImageData: (bytes:ArrayBuffer,width:number,height:number) => void;
export const EglRenderSetIntParams: (paramType:number,param:number) => void;
export const EglPixelMapSurface: (x:number,y:number,w:number,h:number) => ArrayBuffer;
export const EglIsInit: () => number;
export const EglUseProgram: () => void;
export const EglRendering: () => void;
export const EglUniform1i: (key:string,value:number) => void;
export const EglUniform1f: (key:string,value:number) => void;
export const EglUniform2fv: (key:string,vf1:number,vf2:number) => void;
export const EglSetTypeArrayOfFloat: (key:string,uniformType:string,data:Float32Array) => void;
export const EglSetTypeArrayOfMatrix3f: (key:string,value:Float32Array) => void;
export const EglSetTypeArrayOfMatrix4f: (key:string,value:Float32Array) => void;
export const EglDestroy: () => void;
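A minimal usage sketch of these bindings follows; the import specifier matches the accompanying oh-package.json5 ("libentry.so"), the value 300 mirrors PARAM_TYPE_SHADER_INDEX in EGLRender.cpp, and the grayscale shader index is a placeholder for the actual constant defined in constant_shape.h:
import {
    EglRenderInit, EglRenderSetImageData, EglRenderSetIntParams,
    EglUseProgram, EglRendering, EglPixelMapSurface, EglDestroy
} from 'libentry.so'

const SHADER_INDEX_PARAM = 300; // PARAM_TYPE_SHADER_INDEX in EGLRender.cpp
const SHADER_TYPE_GRAYSCALE = 10; // placeholder: use the value defined in constant_shape.h

function renderGrayscale(rgba: ArrayBuffer, width: number, height: number): ArrayBuffer {
    EglRenderInit(); // create the off-screen EGL/GLES3 environment
    EglRenderSetImageData(rgba, width, height); // upload RGBA pixels and create the FBO texture
    EglRenderSetIntParams(SHADER_INDEX_PARAM, SHADER_TYPE_GRAYSCALE); // rebuild the program for the filter
    EglUseProgram(); // bind program, FBO, VAO and the u_texSize uniform
    EglRendering(); // draw the two textured triangles
    const out = EglPixelMapSurface(0, 0, width, height); // read back the vertically re-flipped RGBA pixels
    EglDestroy(); // release GL objects and the EGL context
    return out;
}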

View File

@ -0,0 +1,4 @@
{
"name": "libentry.so",
"types": "./index.d.ts"
}

View File

@ -0,0 +1,31 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
// Created on 2022/12/21.
//
// Node APIs are not fully supported. To resolve the compilation error that an interface cannot be found, include the required Node-API headers.
#ifndef GPU_ImageETS_DebugLog_H
#define GPU_ImageETS_DebugLog_H
#include <Hilog/log.h>
#define LOGI(...)((void)OH_LOG_Print(LOG_APP, LOG_INFO, LOG_DOMAIN, "OH_GPU_LOG", __VA_ARGS__))
#define LOGD(...)((void)OH_LOG_Print(LOG_APP, LOG_DEBUG, LOG_DOMAIN, "OH_GPU_LOG", __VA_ARGS__))
#define LOGW(...)((void)OH_LOG_Print(LOG_APP, LOG_WARN, LOG_DOMAIN, "OH_GPU_LOG", __VA_ARGS__))
#define LOGE(...)((void)OH_LOG_Print(LOG_APP, LOG_ERROR, LOG_DOMAIN, "OH_GPU_LOG", __VA_ARGS__))
#endif // GPU_ImageETS_DebugLog_H

View File

@ -0,0 +1,102 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
// Created on 2022/12/21.
//
// Node APIs are not fully supported. To resolve the compilation error that an interface cannot be found, include the required Node-API headers.
#include <malloc.h>
#include <stddef.h>
#include "DebugLog.h"
#include "GLUtils.h"
GLuint GLUtils::LoadShader(GLenum shaderType, const char *pSource)
{
GLuint shader = 0;
shader = glCreateShader(shaderType);
if (shader) {
glShaderSource(shader, 1, &pSource, NULL);
glCompileShader(shader);
GLint compiled = 0;
glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
if (!compiled) {
GLint infoLen = 0;
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
if (infoLen) {
char* buf = (char*) malloc((size_t)infoLen);
if (buf) {
glGetShaderInfoLog(shader, infoLen, NULL, buf);
free(buf);
}
glDeleteShader(shader);
shader = 0;
}
}
}
return shader;
}
GLuint GLUtils::CreateProgram(const char *pVertexShaderSource,
const char *pFragShaderSource,
GLuint &vertexShaderHandle,
GLuint &fragShaderHandle)
{
GLuint program = 0;
vertexShaderHandle = LoadShader(GL_VERTEX_SHADER, pVertexShaderSource);
if (!vertexShaderHandle) return program;
fragShaderHandle = LoadShader(GL_FRAGMENT_SHADER, pFragShaderSource);
if (!fragShaderHandle) return program;
program = glCreateProgram();
if (program) {
glAttachShader(program, vertexShaderHandle);
CheckGLError("glAttachShader");
glAttachShader(program, fragShaderHandle);
CheckGLError("glAttachShader");
glLinkProgram(program);
GLint linkStatus = GL_FALSE;
glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
glDetachShader(program, vertexShaderHandle);
glDeleteShader(vertexShaderHandle);
vertexShaderHandle = 0;
glDetachShader(program, fragShaderHandle);
glDeleteShader(fragShaderHandle);
fragShaderHandle = 0;
if (linkStatus != GL_TRUE) {
GLint bufLength = 0;
glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength);
if (bufLength) {
char* buf = (char*) malloc((size_t)bufLength);
if (buf) {
glGetProgramInfoLog(program, bufLength, NULL, buf);
free(buf);
}
}
glDeleteProgram(program);
program = 0;
}
}
return program;
}
void GLUtils::CheckGLError(const char *pGLOperation)
{
for (GLint error = glGetError(); error; error = glGetError()) {
LOGI("GLUtils::CheckGLError GL Operation %{public}s() glError (0x%x)\n", pGLOperation, error);
}
}

View File

@ -0,0 +1,36 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
// Created on 2022/12/21.
//
// Node APIs are not fully supported. To resolve the compilation error that an interface cannot be found, include the required Node-API headers.
#ifndef GPU_ImageETS_GLUtils_H
#define GPU_ImageETS_GLUtils_H
#include <GLES3/gl3.h>
class GLUtils {
public:
static GLuint LoadShader(GLenum shaderType, const char *pSource);
static GLuint CreateProgram(const char *pVertexShaderSource, const char *pFragShaderSource,
GLuint &vertexShaderHandle,
GLuint &fragShaderHandle);
static GLuint CreateProgram(const char *pVertexShaderSource, const char *pFragShaderSource);
static void CheckGLError(const char *pGLOperation);
};
#endif // GPU_ImageETS_GLUtils_H

View File

@ -0,0 +1,48 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
// Created on 2022/12/21.
//
// Node APIs are not fully supported. To resolve the compilation error that an interface cannot be found, include the required Node-API headers.
#include "NapiUtil.h"
#include <codecvt>
#include <cstdio>
#include <locale>
#include <string>
#include <string.h>
#include "DebugLog.h"
const int32_t MAX_STR_LENGTH = 1024;
void NapiUtil::JsValueToString(const napi_env &env, const napi_value &value, const int32_t bufLen, std::string &target)
{
if (bufLen <= 0 || bufLen > MAX_STR_LENGTH) {
LOGI("%s string too long malloc failed", __func__);
return;
}
std::unique_ptr <char[]> buf = std::make_unique <char[]>(bufLen);
if (buf.get() == nullptr) {
LOGI("%s nullptr js object to string malloc failed", __func__);
return;
}
(void) memset(buf.get(), 0, bufLen);
size_t result = 0;
napi_get_value_string_utf8(env, value, buf.get(), bufLen, &result);
target = buf.get();
}

View File

@ -0,0 +1,34 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
// Created on 2022/12/21.
//
// Node APIs are not fully supported. To resolve the compilation error that an interface cannot be found, include the required Node-API headers.
#ifndef GPU_ImageETS_NapiUtil_H
#define GPU_ImageETS_NapiUtil_H
#include <string>
#include <napi/native_api.h>
#include "native_common.h"
class NapiUtil {
public:
static void JsValueToString(const napi_env &env, const napi_value &value, const int32_t bufLen,
std::string &target);
};
#endif // GPU_ImageETS_NapiUtil_H

View File

@ -0,0 +1,161 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
// Created on 2022/12/21.
//
// Node APIs are not fully supported. To resolve the compilation error that an interface cannot be found, include the required Node-API headers.
#ifndef GPU_ImageETS_NativeImage_H
#define GPU_ImageETS_NativeImage_H
#include <stdint.h>
#include <malloc.h>
#include <js_native_api.h>
#include <js_native_api_types.h>
#include <node_api.h>
#include <unistd.h>
#include <string.h>
#include "DebugLog.h"
#include "constant/constant_shape.h"
#define IMAGE_FORMAT_RGBA 0x01
#define IMAGE_FORMAT_NV21 0x02
#define IMAGE_FORMAT_NV12 0x03
#define IMAGE_FORMAT_I420 0x04
#define IMAGE_FORMAT_YUYV 0x05
#define IMAGE_FORMAT_GRAY 0x06
#define IMAGE_FORMAT_I444 0x07
#define IMAGE_FORMAT_P010 0x08
#define IMAGE_FORMAT_RGBA_EXT "RGB32"
#define IMAGE_FORMAT_NV21_EXT "NV21"
#define IMAGE_FORMAT_NV12_EXT "NV12"
#define IMAGE_FORMAT_I420_EXT "I420"
#define IMAGE_FORMAT_YUYV_EXT "YUYV"
#define IMAGE_FORMAT_GRAY_EXT "GRAY"
#define IMAGE_FORMAT_I444_EXT "I444"
#define IMAGE_FORMAT_P010_EXT "P010" // 16bit NV21
struct NativeImage {
int width;
int height;
int format;
uint8_t *ppPlane[DEFAULT_THREE];
NativeImage()
{
width = DEFAULT_ZERO;
height = DEFAULT_ZERO;
format = DEFAULT_ZERO;
ppPlane[DEFAULT_ZERO] = nullptr;
ppPlane[DEFAULT_ONE] = nullptr;
ppPlane[DEFAULT_TWO] = nullptr;
}
};
class NativeImageUtil {
public:
static void AllocNativeImage(NativeImage *pImage)
{
if (pImage ->height == DEFAULT_ZERO || pImage ->width == DEFAULT_ZERO) return;
switch (pImage -> format) {
case IMAGE_FORMAT_RGBA: {
pImage->ppPlane[DEFAULT_ZERO] =
static_cast<uint8_t *>(malloc(pImage->width * pImage ->height * DEFAULT_FOUR));
}
break;
case IMAGE_FORMAT_YUYV: {
pImage->ppPlane[DEFAULT_ZERO] =
static_cast<uint8_t *>(malloc(pImage->width * pImage ->height * DEFAULT_TWO));
}
break;
case IMAGE_FORMAT_NV12:
case IMAGE_FORMAT_NV21: {
pImage->ppPlane[DEFAULT_ZERO] =
static_cast<uint8_t *>(malloc(pImage->width * pImage ->height * DEFAULT_ONE_HALF));
pImage->ppPlane[DEFAULT_ONE] =
pImage->ppPlane[DEFAULT_ZERO] + pImage->width * pImage->height;
}
break;
case IMAGE_FORMAT_I420: {
pImage->ppPlane[DEFAULT_ZERO] =
static_cast<uint8_t *>(malloc(pImage->width * pImage ->height * DEFAULT_ONE_HALF));
pImage->ppPlane[DEFAULT_ONE] =
pImage->ppPlane[DEFAULT_ZERO] + pImage->width * pImage->height;
pImage->ppPlane[DEFAULT_TWO] =
pImage->ppPlane[DEFAULT_ONE] + pImage->width * (pImage->height >> DEFAULT_TWO);
}
break;
case IMAGE_FORMAT_GRAY: {
pImage->ppPlane[DEFAULT_ZERO] =
static_cast<uint8_t *>(malloc(pImage->width * pImage ->height));
}
break;
case IMAGE_FORMAT_I444: {
pImage->ppPlane[DEFAULT_ZERO] =
static_cast<uint8_t *>(malloc(pImage->width * pImage ->height * DEFAULT_THREE));
}
break;
case IMAGE_FORMAT_P010: {
pImage->ppPlane[DEFAULT_ZERO] =
static_cast<uint8_t *>(malloc(pImage->width * pImage ->height * DEFAULT_THREE));
pImage->ppPlane[DEFAULT_ONE] =
pImage->ppPlane[DEFAULT_ZERO] + pImage->width * pImage->height * DEFAULT_TWO;
}
break;
default:
break;
}
}
static void FreeNativeImage(NativeImage *pImage)
{
if (pImage == nullptr || pImage->ppPlane[DEFAULT_ZERO] == nullptr) return;
free(pImage->ppPlane[DEFAULT_ZERO]);
pImage->ppPlane[DEFAULT_ZERO] = nullptr;
pImage->ppPlane[DEFAULT_ONE] = nullptr;
pImage->ppPlane[DEFAULT_TWO] = nullptr;
}
static bool CreateArrayBuffer(napi_env env, void* src, size_t srcLen, napi_value *res)
{
if (src == nullptr || srcLen == DEFAULT_ZERO) {
return false;
}
void *nativePtr = nullptr;
if (napi_create_arraybuffer(env, srcLen, &nativePtr, res) != napi_ok || nativePtr == nullptr) {
return false;
}
memcpy(nativePtr, src, srcLen);
return true;
}
static void Flip(uint8_t** buf, int width, int height)
{
int totalLength = width * height * DEFAULT_FOUR;
int oneLineLength = width * DEFAULT_FOUR;
uint8_t* tmp = (uint8_t*)malloc(totalLength);
memcpy(tmp, *buf, totalLength);
memset(*buf, DEFAULT_ZERO, sizeof(uint8_t)*totalLength);
for (int i = 0; i < height; i++) {
memcpy(*buf + oneLineLength * i, tmp + totalLength - oneLineLength * (i+1), oneLineLength);
}
free(tmp);
}
};
#endif // GPU_ImageETS_NativeImage_H

View File

@ -0,0 +1,53 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { GPUImageFilter } from './GPUImageFilter'
import { GPUFilterType } from '../gl/GPUFilterType'
export class GPUImage3x3TextureSamplingFilter extends GPUImageFilter {
private texelWidth: number;
private texelHeight: number;
private lineSize: number = 1.0;
constructor() {
super();
}
getFilterType(): GPUFilterType {
return GPUFilterType.X3TEXTURE;
}
onInitialized() {
}
onReadySize() {
}
setLineSize(lineSize: number) {
this.lineSize = lineSize;
}
setTexelWidth(texelWidth: number) {
this.texelWidth = this.lineSize / texelWidth;
this.setFloat("texelWidth", this.texelWidth);
}
setTexelHeight(texelHeight: number) {
this.texelHeight = this.lineSize / texelHeight;
this.setFloat("texelHeight", this.texelHeight);
}
}

View File

@ -0,0 +1,82 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { GPUImageFilter } from './GPUImageFilter'
import { GPUFilterType } from '../gl/GPUFilterType'
export class GPUImageBlurFilter extends GPUImageFilter {
private blurRadius: number;
private blurOffset: Array<number>;
private sumWeight: number;
constructor() {
super();
}
getFilterType(): GPUFilterType {
return GPUFilterType.BLUR;
}
onInitialized() {
}
onReadySize() {
}
setBlurRadius(blurRadius: number) {
this.blurRadius = blurRadius;
this.setInteger("blurRadius", this.blurRadius);
this.calculateSumWeight();
}
setBlurOffset(blurOffset: Array<number>) {
let offset = new Array<number>(2);
if (this.width <= 0 || this.height <= 0) {
throw new Error("the width or height must be greater than 0");
}
if (!blurOffset || blurOffset.length !== 2) {
throw new Error("you should a valid value needs to be set.")
}
offset[0] = blurOffset[0] / this.width;
offset[1] = blurOffset[1] / this.height;
this.blurOffset = offset;
this.setFloat2f("blurOffset", this.blurOffset);
}
setSumWeight(sumWeight: number) {
this.sumWeight = sumWeight;
this.setFloat("sumWeight", this.sumWeight);
}
private calculateSumWeight() {
if (this.blurRadius < 1) {
this.setSumWeight(0);
return;
}
let sumWeight = 0;
let sigma = this.blurRadius / 3.0;
for (let i = 0; i < this.blurRadius; i++) {
let weight = ((1.0 / Math.sqrt(2.0 * Math.PI * sigma * sigma)) * Math.exp(-(i * i) / (2.0 * sigma * sigma)));
sumWeight += weight;
if (i != 0) {
sumWeight += weight;
}
}
this.setSumWeight(sumWeight);
}
}
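A short usage sketch for the blur filter (sizes, offsets, and the import path are illustrative): setImageData must run before setBlurOffset, because the offset is normalized by the image width and height:
import { GPUImageBlurFilter } from './GPUImageBlurFilter' // path is illustrative
declare const rgbaBuffer: ArrayBuffer; // RGBA pixels, e.g. from PixelMap.readPixelsToBuffer
const blur = new GPUImageBlurFilter();
blur.setImageData(rgbaBuffer, 512, 512); // sets width/height used below
blur.setBlurRadius(8); // also computes and queues sumWeight
blur.setBlurOffset([2, 2]); // stored internally as [2/512, 2/512]
blur.getPixelMapBuf(0, 0, 512, 512).then((out: ArrayBuffer) => {
    // out holds the blurred RGBA pixels
});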

View File

@ -0,0 +1,45 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { GPUImageFilter } from './GPUImageFilter'
import { GPUFilterType } from '../gl/GPUFilterType'
export class GPUImageBrightnessFilter extends GPUImageFilter {
private brightness: number = 25
constructor(brightness?: number) {
super()
if (brightness) {
this.brightness = brightness;
}
}
getFilterType(): GPUFilterType {
return GPUFilterType.BRIGHT;
}
onInitialized() {
}
onReadySize() {
}
setBrightness(brightness: number) {
this.brightness = brightness;
this.setFloat("brightness", this.brightness);
}
}

View File

@ -0,0 +1,36 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { GPUImageFilter } from './GPUImageFilter'
import { GPUFilterType } from '../gl/GPUFilterType'
export class GPUImageColorInvertFilter extends GPUImageFilter {
constructor() {
super()
}
getFilterType(): GPUFilterType {
return GPUFilterType.INVERT;
}
onInitialized() {
}
onReadySize() {
}
}

View File

@ -0,0 +1,56 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { GPUImageFilter } from './GPUImageFilter'
import { GPUFilterType } from '../gl/GPUFilterType'
export class GPUImageColorMatrixFilter extends GPUImageFilter {
private intensity: number = 1.0;
private colorMatrix: Array<number> = [
1.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 1.0
]
constructor(intensity?: number) {
super()
if (intensity) {
this.intensity = intensity;
}
}
getFilterType(): GPUFilterType {
return GPUFilterType.CONTRAST;
}
onInitialized() {
}
onReadySize() {
}
setIntensity(intensity: number) {
this.intensity = intensity;
this.setFloat("intensity", this.intensity);
}
setColorMatrix(colorMatrix: Array<number>) {
this.colorMatrix = colorMatrix;
this.setUniformMatrix4f("colorMatrix", this.colorMatrix);
}
}

View File

@ -0,0 +1,45 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { GPUImageFilter } from './GPUImageFilter'
import { GPUFilterType } from '../gl/GPUFilterType'
export class GPUImageContrastFilter extends GPUImageFilter {
private contrast: number = 1.0;
constructor(contrast?: number) {
super()
if (contrast) {
this.contrast = contrast;
}
}
getFilterType(): GPUFilterType {
return GPUFilterType.CONTRAST;
}
onInitialized() {
}
onReadySize() {
}
setContrast(contrast: number) {
this.contrast = contrast;
this.setFloat("contrast", this.contrast);
}
}

View File

@ -0,0 +1,278 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { NativeEglRender } from '../gl/NativeEglRender'
import { GPUFilterType } from '../gl/GPUFilterType'
import { Runnable } from '../interface/Runnable'
import LinkedList from '@ohos.util.LinkedList';
import ArrayList from '@ohos.util.ArrayList';
export abstract class GPUImageFilter {
private render: NativeEglRender;
private isInitialized: boolean;
private runOnDraw: LinkedList<Runnable>;
protected width: number;
protected height: number;
constructor() {
this.render = new NativeEglRender();
this.runOnDraw = new LinkedList();
}
private init() {
if (this.render) {
this.render.native_EglRenderInit();
}
this.onInitialized();
}
protected setSurfaceFilterType() {
let filter = this.getFilterType();
if (!this.render.native_glIsInit()) {
throw new Error("the egl surface not init");
}
this.render.native_EglRenderSetIntParams(300, filter.valueOf());
}
setImageData(buf: ArrayBuffer, width: number, height: number) {
if (!buf) {
throw new Error("this pixelMap data is empty");
}
if (width <= 0 || height <= 0) {
throw new Error("this pixelMap width and height is invalidation")
}
this.width = width;
this.height = height;
this.ensureInit();
this.onReadySize();
this.setSurfaceFilterType();
this.render.native_EglRenderSetImageData(buf, width, height);
}
protected onDraw() {
if (!this.render.native_glIsInit()) {
throw new Error("the egl surface not init")
}
this.render.native_EglUseProgram();
this.runPendingOnDrawTasks();
this.onRendering();
}
protected onRendering() {
this.render.native_EglRendering();
}
getPixelMapBuf(x: number, y: number, width: number, height: number): Promise<ArrayBuffer> {
if (x < 0 || y < 0) {
throw new Error("the x or y should be greater than 0")
}
if (width <= 0 || height <= 0) {
throw new Error("the width or height should be greater than 0")
}
let that = this;
return new Promise((resolve, rejects) => {
that.onDraw();
let buf = this.render.native_EglBitmapFromGLSurface(x, y, width, height);
if (!buf) {
rejects(new Error("get pixelMap fail"))
} else {
resolve(buf);
that.destroy();
}
})
}
ensureInit() {
if (this.render) {
this.isInitialized = this.render.native_glIsInit();
if (!this.isInitialized) {
this.init();
}
}
}
protected runPendingOnDrawTasks() {
while (this.runOnDraw.length > 0) {
this.runOnDraw.removeFirst().run();
}
}
protected addRunOnDraw(runAble: Runnable) {
if (!runAble) {
return;
}
this.runOnDraw.add(runAble);
}
protected setInteger(location: string, value: number) {
let that = this;
let able: Runnable = {
run() {
that.ensureInit();
that.render.native_setInteger(location, value);
}
}
this.addRunOnDraw(able);
}
protected setFloat(location: string, value: number) {
let that = this;
let able: Runnable = {
run() {
that.ensureInit();
that.render.native_setFloat(location, value);
}
}
this.addRunOnDraw(able);
}
protected setPoint(location: string, vf1: number, vf2: number) {
let that = this;
let able: Runnable = {
run() {
that.ensureInit();
that.render.native_setPoint(location, vf1, vf2);
}
}
this.addRunOnDraw(able);
}
protected setFloat2f(location: string, value: Array<number>) {
if (value.length !== 2) {
return;
}
let that = this;
let able: Runnable = {
run() {
that.ensureInit();
let array = new Float32Array(2);
array[0] = value[0];
array[1] = value[1];
that.render.native_setFloat2f(location, array);
}
}
this.addRunOnDraw(able);
}
protected setFloatVec2(location: string, value: Array<number>) {
if (value.length !== 2) {
return;
}
let that = this;
let able: Runnable = {
run() {
that.ensureInit();
let array = new Float32Array(2);
array[0] = value[0];
array[1] = value[1];
that.render.native_setFloatVec2(location, array);
}
}
this.addRunOnDraw(able);
}
protected setFloatVec3(location: string, value: Array<number>) {
if (value.length !== 3) {
return;
}
let that = this;
let able: Runnable = {
run() {
that.ensureInit();
let array = new Float32Array(3);
array[0] = value[0];
array[1] = value[1];
array[2] = value[2];
that.render.native_setFloatVec3(location, array);
}
}
this.addRunOnDraw(able);
}
protected setFloatVec4(location: string, value: Array<number>) {
if (value.length !== 4) {
return;
}
let that = this;
let able: Runnable = {
run() {
that.ensureInit();
let array = new Float32Array(4);
array[0] = value[0];
array[1] = value[1];
array[2] = value[2];
array[3] = value[3];
that.render.native_setFloatVec4(location, array);
}
}
this.addRunOnDraw(able);
}
protected setUniformMatrix3f(location: string, value: Array<number>) {
if (!value) {
return;
}
let that = this;
let able: Runnable = {
run() {
that.ensureInit();
let array = new Float32Array(value.length);
for (let i = 0; i < value.length; i++) {
array[i] = value[i];
}
that.render.native_setUniformMatrix3f(location, array);
}
}
this.addRunOnDraw(able);
}
protected setUniformMatrix4f(location: string, value: Array<number>) {
if (!value) {
return;
}
let that = this;
let able: Runnable = {
run() {
that.ensureInit();
let array = new Float32Array(value.length);
for (let i = 0; i < value.length; i++) {
array[i] = value[i];
}
that.render.native_setUniformMatrix4f(location, array);
}
}
this.addRunOnDraw(able);
}
getFilters(): ArrayList<GPUImageFilter> {
return null;
}
destroy() {
this.render.native_glIsDestroy();
this.render = null;
this.isInitialized = false;
}
abstract getFilterType(): GPUFilterType;
abstract onReadySize();
abstract onInitialized();
}
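A minimal end-to-end sketch of the filter lifecycle above, using the brightness filter as an example (the import path, buffer source, dimensions and brightness value are assumptions): setters queue uniform uploads via addRunOnDraw, setImageData initializes the EGL surface and selects the shader, and getPixelMapBuf draws, reads the pixels back, and destroys the native render:
import { GPUImageBrightnessFilter } from './GPUImageBrightnessFilter' // path is illustrative
declare const srcPixels: ArrayBuffer; // RGBA source pixels, e.g. from PixelMap.readPixelsToBuffer
const filter = new GPUImageBrightnessFilter();
filter.setImageData(srcPixels, 256, 256); // lazily init EGL, select the brightness shader, upload pixels
filter.setBrightness(0.3); // queued and applied on the next draw
filter.getPixelMapBuf(0, 0, 256, 256).then((result: ArrayBuffer) => {
    // result holds the filtered RGBA pixels; the native render has been destroyed at this point
});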

View File

@ -0,0 +1,36 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { GPUImageFilter } from './GPUImageFilter'
import { GPUFilterType } from '../gl/GPUFilterType'
import ArrayList from '@ohos.util.ArrayList';
export abstract class GPUImageFilterGroup extends GPUImageFilter {
private filters: ArrayList<GPUImageFilter>;
constructor() {
super()
this.filters = new ArrayList();
}
addFilter(aFilter: GPUImageFilter) {
this.filters.add(aFilter);
}
getFilters(): ArrayList<GPUImageFilter> {
return this.filters;
}
}

View File

@ -0,0 +1,37 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { GPUImageFilter } from './GPUImageFilter'
import { GPUFilterType } from '../gl/GPUFilterType'
export class GPUImageGrayscaleFilter extends GPUImageFilter {
constructor() {
super()
}
getFilterType(): GPUFilterType {
return GPUFilterType.GRAYSCALE;
}
onInitialized() {
}
onReadySize() {
}
}

View File

@ -0,0 +1,47 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { GPUImageFilter } from './GPUImageFilter'
import { GPUFilterType } from '../gl/GPUFilterType'
export class GPUImageKuwaharaFilter extends GPUImageFilter {
private _radius: number = 25;
constructor(radius?: number) {
super()
if (radius) {
this._radius = radius;
}
}
getFilterType(): GPUFilterType {
return GPUFilterType.KUWAHARA;
}
onInitialized() {
}
onReadySize() {
}
setRadius(radius: number) {
this._radius = radius;
this.setFloat("radius", this._radius);
}
}

View File

@ -0,0 +1,45 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { GPUImageFilter } from './GPUImageFilter'
import { GPUFilterType } from '../gl/GPUFilterType'
export class GPUImagePixelationFilter extends GPUImageFilter {
private pixel: number = 1.0;
constructor() {
super()
}
getFilterType(): GPUFilterType {
return GPUFilterType.PIXELATION;
}
onInitialized() {
}
onReadySize() {
}
setPixel(pixel: number) {
this.pixel = pixel;
this.setFloat("imageWidthFactor", 1.0 / this.width);
this.setFloat("imageHeightFactor", 1.0 / this.height);
this.setFloat("pixel", this.pixel);
}
}

View File

@ -0,0 +1,39 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { GPUImageColorMatrixFilter } from './GPUImageColorMatrixFilter'
import { GPUFilterType } from '../gl/GPUFilterType'
export class GPUImageSepiaToneFilter extends GPUImageColorMatrixFilter {
constructor(intensity?: number) {
super()
this.setIntensity(intensity ? intensity : 1.0);
this.setColorMatrix([
0.3588, 0.7044, 0.1368, 0.0,
0.2990, 0.5870, 0.1140, 0.0,
0.2392, 0.4696, 0.0912, 0.0,
0.0, 0.0, 0.0, 1.0
])
}
getFilterType(): GPUFilterType {
return GPUFilterType.SEPIA;
}
onReadySize() {
}
}

View File

@ -0,0 +1,37 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { GPUImage3x3TextureSamplingFilter } from './GPUImage3x3TextureSamplingFilter'
import { GPUFilterType } from '../gl/GPUFilterType'
export class GPUImageSketchFilter extends GPUImage3x3TextureSamplingFilter {
constructor() {
super()
}
getFilterType(): GPUFilterType {
return GPUFilterType.SKETCH;
}
onInitialized() {
}
onReadySize() {
this.setTexelWidth(this.width);
this.setTexelHeight(this.height);
}
}

View File

@ -0,0 +1,55 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { GPUImageFilter } from './GPUImageFilter'
import { GPUFilterType } from '../gl/GPUFilterType'
export class GPUImageSwirlFilter extends GPUImageFilter {
private _radius: number = 25;
private _angle: number = 0.9;
private _xCenter: number = 0.5;
private _yCenter: number = 0.5;
constructor() {
super()
}
getFilterType(): GPUFilterType {
return GPUFilterType.SWIRL;
}
onInitialized() {
}
onReadySize() {
}
setRadius(radius: number) {
this._radius = radius;
this.setFloat("radius", this._radius);
}
setAngle(angle: number) {
this._angle = angle;
this.setFloat("angle", this._angle);
}
setCenter(x_center: number, y_center: number) {
this._xCenter = x_center;
this._yCenter = y_center;
this.setPoint("center", x_center, y_center);
}
}

View File

@ -0,0 +1,49 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { GPUImage3x3TextureSamplingFilter } from './GPUImage3x3TextureSamplingFilter'
import { GPUFilterType } from '../gl/GPUFilterType'
export class GPUImageToonFilter extends GPUImage3x3TextureSamplingFilter {
private threshold: number = 0.2;
private quantizationLevels: number = 10.0;
constructor() {
super()
}
getFilterType(): GPUFilterType {
return GPUFilterType.TOON;
}
onInitialized() {
}
onReadySize() {
this.setTexelWidth(this.width);
this.setTexelHeight(this.height);
}
setThreshold(threshold: number) {
this.threshold = threshold;
this.setFloat("threshold", threshold);
}
setQuantizationLevels(quantizationLevels: number) {
this.quantizationLevels = quantizationLevels;
this.setFloat("quantizationLevels", quantizationLevels);
}
}

View File

@ -0,0 +1,59 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { GPUImageFilter } from './GPUImageFilter'
import { GPUFilterType } from '../gl/GPUFilterType'
export class GPUImageVignetterFilter extends GPUImageFilter {
private vignetteCenter: Array<number> = [0.0, 0.0];
private vignetteColor: Array<number> = [0.0, 0.0, 0.0];
private vignetteStart: number;
private vignetteEnd: number;
constructor() {
super()
}
getFilterType(): GPUFilterType {
return GPUFilterType.VIGNETTE;
}
onInitialized() {
}
onReadySize() {
}
setVignetteCenter(center: Array<number>) {
this.vignetteCenter = center;
this.setFloatVec2("vignetteCenter", center);
}
setVignetteColor(colors: Array<number>) {
this.vignetteColor = colors;
this.setFloatVec3("vignetteColor", colors);
}
setVignetteStart(start: number) {
this.vignetteStart = start;
this.setFloat("vignetteStart", this.vignetteStart);
}
setVignetteEnd(end: number) {
this.vignetteEnd = end;
this.setFloat("vignetteEnd", this.vignetteEnd);
}
}

View File

@ -0,0 +1,31 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export enum GPUFilterType {
BRIGHT,
CONTRAST,
INVERT,
PIXELATION,
KUWAHARA,
SEPIA,
SKETCH,
SWIRL,
TOON,
VIGNETTE,
GRAYSCALE,
X3TEXTURE,
BLUR,
COLOR_M
}

View File

@ -0,0 +1,101 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import nativeGpu from "libnativeGpu.so"
export class NativeEglRender {
static EGLTrue: number = 1;
native_EglRenderInit(): void {
nativeGpu.EglRenderInit();
}
native_EglRenderSetImageData(bytes: ArrayBuffer, width: number, height: number) {
nativeGpu.EglRenderSetImageData(bytes, width, height);
}
native_EglRenderSetIntParams(paramType: number, param: number) {
nativeGpu.EglRenderSetIntParams(paramType, param);
}
native_EglBitmapFromGLSurface(x: number, y: number, w: number, h: number): ArrayBuffer {
let num = nativeGpu.EglPixelMapSurface(x, y, w, h);
return num;
}
native_glIsInit(): boolean {
let initStatus = nativeGpu.EglIsInit();
if (initStatus === NativeEglRender.EGLTrue) {
return true;
}
return false;
}
native_EglUseProgram() {
nativeGpu.EglUseProgram();
}
native_EglRendering() {
nativeGpu.EglRendering();
}
native_setInteger(key: string, value: number) {
nativeGpu.EglUniform1i(key, value)
}
native_setFloat(key: string, value: number) {
nativeGpu.EglUniform1f(key, value)
}
native_setPoint(key: string, vf1: number, vf2: number) {
nativeGpu.EglUniform2fv(key, vf1, vf2);
}
native_setFloat2f(key: string, value: Float32Array) {
this.native_setTypeArray(key, "glUniform2f", value);
}
native_setFloatVec2(key: string, value: Float32Array) {
this.native_setTypeArray(key, "glUniform2fv", value);
}
native_setFloatVec3(key: string, value: Float32Array) {
this.native_setTypeArray(key, "glUniform3fv", value);
}
native_setFloatVec4(key: string, value: Float32Array) {
this.native_setTypeArray(key, "glUniform4fv", value);
}
native_setFloatArray(key: string, value: Float32Array) {
this.native_setTypeArray(key, "glUniform1fv", value);
}
native_setUniformMatrix3f(key: string, value: Float32Array) {
nativeGpu.EglSetTypeArrayOfMatrix3f(key, value);
}
native_setUniformMatrix4f(key: string, value: Float32Array) {
nativeGpu.EglSetTypeArrayOfMatrix4f(key, value);
}
native_setTypeArray(key: string, uniformType: string, data: Float32Array) {
nativeGpu.EglSetTypeArrayOfFloat(key, uniformType, data);
}
native_glIsDestroy() {
nativeGpu.EglDestroy();
}
}

View File

@ -0,0 +1,18 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export interface Runnable {
run();
}

View File

@ -0,0 +1,10 @@
{
"module": {
"name": "gpu_transform",
"type": "har",
"deviceTypes": [
"default",
"tablet"
]
}
}

View File

@ -0,0 +1,8 @@
{
"string": [
{
"name": "page_show",
"value": "page from npm package"
}
]
}

View File

@ -0,0 +1,8 @@
{
"string": [
{
"name": "page_show",
"value": "page from npm package"
}
]
}

View File

@ -0,0 +1,8 @@
{
"string": [
{
"name": "page_show",
"value": "page from npm package"
}
]
}

View File

@ -70,6 +70,9 @@ export * from './src/main/ets/components/imageknife/transform/SepiaFilterTransfo
export * from './src/main/ets/components/imageknife/transform/SketchFilterTransformation'
export * from './src/main/ets/components/imageknife/transform/MaskTransformation'
export * from './src/main/ets/components/imageknife/transform/SwirlFilterTransformation'
export * from './src/main/ets/components/imageknife/transform/KuwaharaFilterTransform'
export * from './src/main/ets/components/imageknife/transform/ToonFilterTransform'
export * from './src/main/ets/components/imageknife/transform/VignetteFilterTransform'
export * from './src/main/ets/components/imageknife/transform/TransformUtils'
export * from './src/main/ets/components/imageknife/transform/TransformType'
export * from './src/main/ets/components/imageknife/transform/pixelmap/CenterCrop'
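With the three new exports above, the GPU-backed transforms can be imported from the package entry in the same way as the existing ones; a minimal sketch:
import { KuwaharaFilterTransform, ToonFilterTransform, VignetteFilterTransform } from '@ohos/imageknife'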

View File

@ -12,6 +12,7 @@
},
"main": "index.ets",
"repository": "https://gitee.com/openharmony-tpc/ImageKnife",
"type": "module",
"version": "1.0.5",
"dependencies": {
"pako": "^1.0.5",
@ -19,7 +20,8 @@
"@ohos/disklrucache": "^1.0.0",
"@ohos/svg": "1.1.0",
"crc-32": "^1.2.0",
"spark-md5": "^3.0.2"
"spark-md5": "^3.0.2",
"@ohos/gpu_transform": "file:../gpu_transform"
},
"tags": [
"ImageCache",
@ -27,6 +29,5 @@
],
"license": "Apache License 2.0",
"devDependencies": {},
"name": "@ohos/imageknife",
"type": "module"
"name": "@ohos/imageknife"
}

View File

@ -13,36 +13,39 @@
* limitations under the License.
*/
import {DiskStrategy} from "../cache/diskstrategy/DiskStrategy"
import {AsyncCallback} from "../imageknife/interface/asynccallback"
import {AsyncSuccess} from "../imageknife/interface/AsyncSuccess"
import {IAllCacheInfoCallback} from "../imageknife/interface/IAllCacheInfoCallback"
import {AUTOMATIC} from "../cache/diskstrategy/enum/AUTOMATIC"
import {BaseTransform} from "../imageknife/transform/BaseTransform"
import {RotateImageTransformation} from "../imageknife/transform/RotateImageTransformation"
import {ImageKnifeData} from "../imageknife/ImageKnifeData"
import {CenterCrop} from '../imageknife/transform/pixelmap/CenterCrop'
import {CenterInside} from '../imageknife/transform/pixelmap/CenterInside'
import {FitCenter} from '../imageknife/transform/pixelmap/FitCenter'
import {RoundedCornersTransformation} from '../imageknife/transform/RoundedCornersTransformation'
import { DiskStrategy } from "../cache/diskstrategy/DiskStrategy"
import { AsyncCallback } from "../imageknife/interface/asynccallback"
import { AsyncSuccess } from "../imageknife/interface/AsyncSuccess"
import { IAllCacheInfoCallback } from "../imageknife/interface/IAllCacheInfoCallback"
import { AUTOMATIC } from "../cache/diskstrategy/enum/AUTOMATIC"
import { BaseTransform } from "../imageknife/transform/BaseTransform"
import { RotateImageTransformation } from "../imageknife/transform/RotateImageTransformation"
import { ImageKnifeData } from "../imageknife/ImageKnifeData"
import { CenterCrop } from '../imageknife/transform/pixelmap/CenterCrop'
import { CenterInside } from '../imageknife/transform/pixelmap/CenterInside'
import { FitCenter } from '../imageknife/transform/pixelmap/FitCenter'
import { RoundedCornersTransformation } from '../imageknife/transform/RoundedCornersTransformation'
import {CropCircleTransformation} from '../imageknife/transform/CropCircleTransformation'
import { CropCircleTransformation } from '../imageknife/transform/CropCircleTransformation'
import {CropCircleWithBorderTransformation} from '../imageknife/transform/CropCircleWithBorderTransformation'
import {CropSquareTransformation} from '../imageknife/transform/CropSquareTransformation'
import {CropTransformation} from '../imageknife/transform/CropTransformation'
import {CropType} from '../imageknife/transform/CropTransformation'
import {GrayscaleTransformation} from '../imageknife/transform/GrayscaleTransformation'
import {BrightnessFilterTransformation} from '../imageknife/transform/BrightnessFilterTransformation'
import {ContrastFilterTransformation} from '../imageknife/transform/ContrastFilterTransformation'
import {InvertFilterTransformation} from '../imageknife/transform/InvertFilterTransformation'
import {SepiaFilterTransformation} from '../imageknife/transform/SepiaFilterTransformation'
import {SketchFilterTransformation} from '../imageknife/transform/SketchFilterTransformation'
import {BlurTransformation} from '../imageknife/transform/BlurTransformation'
import {PixelationFilterTransformation} from '../imageknife/transform/PixelationFilterTransformation'
import {MaskTransformation} from '../imageknife/transform/MaskTransformation'
import {SwirlFilterTransformation} from '../imageknife/transform/SwirlFilterTransformation'
import {LogUtil} from '../imageknife/utils/LogUtil'
import { CropCircleWithBorderTransformation } from '../imageknife/transform/CropCircleWithBorderTransformation'
import { CropSquareTransformation } from '../imageknife/transform/CropSquareTransformation'
import { CropTransformation } from '../imageknife/transform/CropTransformation'
import { CropType } from '../imageknife/transform/CropTransformation'
import { GrayscaleTransformation } from '../imageknife/transform/GrayscaleTransformation'
import { BrightnessFilterTransformation } from '../imageknife/transform/BrightnessFilterTransformation'
import { ContrastFilterTransformation } from '../imageknife/transform/ContrastFilterTransformation'
import { InvertFilterTransformation } from '../imageknife/transform/InvertFilterTransformation'
import { SepiaFilterTransformation } from '../imageknife/transform/SepiaFilterTransformation'
import { SketchFilterTransformation } from '../imageknife/transform/SketchFilterTransformation'
import { BlurTransformation } from '../imageknife/transform/BlurTransformation'
import { PixelationFilterTransformation } from '../imageknife/transform/PixelationFilterTransformation'
import { MaskTransformation } from '../imageknife/transform/MaskTransformation'
import { SwirlFilterTransformation } from '../imageknife/transform/SwirlFilterTransformation'
import { KuwaharaFilterTransform } from '../imageknife/transform/KuwaharaFilterTransform'
import { ToonFilterTransform } from '../imageknife/transform/ToonFilterTransform'
import { VignetteFilterTransform } from '../imageknife/transform/VignetteFilterTransform'
import { LogUtil } from '../imageknife/utils/LogUtil'
export class RequestOption {
loadSrc: string | PixelMap | Resource;
@ -67,7 +70,6 @@ export class RequestOption {
retryholderSrc: PixelMap | Resource;
retryholderFunc: AsyncSuccess<ImageKnifeData>
retryholderData: ImageKnifeData
size: {
width: number,
height: number
@ -78,6 +80,8 @@ export class RequestOption {
onlyRetrieveFromCache: boolean = false;
isCacheable: boolean = true;
// Enable GPU-based transformation rendering
gpuEnabled: boolean = false;
// 变换相关
transformations: Array<BaseTransform<PixelMap>> = new Array();
generateCacheKey: string = "";
@ -173,10 +177,10 @@ export class RequestOption {
return this;
}
thumbnail(sizeMultiplier: number, func?: AsyncSuccess<ImageKnifeData>,displayTime?:number) {
thumbnail(sizeMultiplier: number, func?: AsyncSuccess<ImageKnifeData>, displayTime?: number) {
this.thumbSizeMultiplier = sizeMultiplier;
this.thumbHolderFunc = func;
if(displayTime){
if (displayTime) {
this.thumbDelayTime = displayTime;
}
return this;
@ -187,8 +191,6 @@ export class RequestOption {
return this;
}
addListener(func: AsyncCallback<ImageKnifeData>) {
this.requestListeners.push(func);
return this;
@ -218,111 +220,153 @@ export class RequestOption {
this.transformations.push(new CenterCrop());
return this;
}
centerInside() {
this.transformations.push(new CenterInside());
return this;
}
fitCenter() {
this.transformations.push(new FitCenter());
return this;
}
roundedCorners(obj:{ top_left: number, top_right: number, bottom_left: number, bottom_right: number }){
let transformation = new RoundedCornersTransformation({top_left: obj.top_left, top_right: obj.top_right, bottom_left: obj.bottom_left, bottom_right: obj.bottom_right})
roundedCorners(obj: {
top_left: number,
top_right: number,
bottom_left: number,
bottom_right: number
}) {
let transformation = new RoundedCornersTransformation({
top_left: obj.top_left,
top_right: obj.top_right,
bottom_left: obj.bottom_left,
bottom_right: obj.bottom_right
})
this.transformations.push(transformation);
return this;
}
cropCircle(){
cropCircle() {
let transformation = new CropCircleTransformation()
this.transformations.push(transformation);
return this;
}
cropCircleWithBorder(border:number, obj:{ r_color: number, g_color: number, b_color: number }){
let transformation = new CropCircleWithBorderTransformation(border,obj)
cropCircleWithBorder(border: number, obj: {
r_color: number,
g_color: number,
b_color: number
}) {
let transformation = new CropCircleWithBorderTransformation(border, obj)
this.transformations.push(transformation);
return this;
}
cropSquare(){
cropSquare() {
let transformation = new CropSquareTransformation()
this.transformations.push(transformation);
return this;
}
crop(width: number, height: number, cropType: CropType){
crop(width: number, height: number, cropType: CropType) {
let transformation = new CropTransformation(width, height, cropType)
this.transformations.push(transformation);
return this;
}
grayscale(){
grayscale() {
let transformation = new GrayscaleTransformation()
this.transformations.push(transformation);
return this;
}
brightnessFilter(brightness:number){
brightnessFilter(brightness: number) {
let transformation = new BrightnessFilterTransformation(brightness)
this.transformations.push(transformation);
return this;
}
contrastFilter(contrast:number){
contrastFilter(contrast: number) {
let transformation = new ContrastFilterTransformation(contrast)
this.transformations.push(transformation);
return this;
}
invertFilter(){
invertFilter() {
let transformation = new InvertFilterTransformation()
this.transformations.push(transformation);
return this;
}
sepiaFilter(){
sepiaFilter() {
let transformation = new SepiaFilterTransformation()
this.transformations.push(transformation);
return this;
}
sketchFilter(){
sketchFilter() {
let transformation = new SketchFilterTransformation()
this.transformations.push(transformation);
return this;
}
blur(radius: number){
blur(radius: number) {
let transformation = new BlurTransformation(radius)
this.transformations.push(transformation);
return this;
}
pixelationFilter(pixel: number){
pixelationFilter(pixel: number) {
let transformation = new PixelationFilterTransformation(pixel)
this.transformations.push(transformation);
return this;
}
swirlFilter(degree: number){
swirlFilter(degree: number) {
let transformation = new SwirlFilterTransformation(degree)
this.transformations.push(transformation);
return this;
}
mask(maskResource: Resource){
mask(maskResource: Resource) {
let transformation = new MaskTransformation(maskResource)
this.transformations.push(transformation);
return this;
}
transform(input:BaseTransform<PixelMap>){
kuwaharaFilter(radius: number) {
let transformation = new KuwaharaFilterTransform(radius);
this.transformations.push(transformation);
return this;
}
toonFilter(threshold: number, quantizationLevels: number) {
let transformation = new ToonFilterTransform(threshold, quantizationLevels);
this.transformations.push(transformation);
return this;
}
vignetteFilter(centerPoint: Array<number>, vignetteColor: Array<number>, vignetteSpace: Array<number>) {
let transformation = new VignetteFilterTransform(centerPoint, vignetteColor, vignetteSpace);
this.transformations.push(transformation);
return this;
}
transform(input: BaseTransform<PixelMap>) {
this.transformations.push(input);
return this;
}
transforms(inputs:BaseTransform<PixelMap>[]){
transforms(inputs: BaseTransform<PixelMap>[]) {
this.transformations = inputs;
return this;
}
// Enable GPU-based transformation rendering
enableGPU() {
this.gpuEnabled = true;
return this;
}
// Placeholder image parsed successfully
placeholderOnComplete(imageKnifeData: ImageKnifeData) {
@ -340,8 +384,6 @@ export class RequestOption {
}
// Thumbnail parsed successfully
thumbholderOnComplete(imageKnifeData: ImageKnifeData) {
if (!this.loadMainReady && !(this.loadErrorReady || this.loadRetryReady)) {
@ -369,15 +411,15 @@ export class RequestOption {
LogUtil.log("失败占位图解析失败 error =" + error)
}
retryholderOnComplete(imageKnifeData: ImageKnifeData){
retryholderOnComplete(imageKnifeData: ImageKnifeData) {
this.retryholderData = imageKnifeData;
if(this.loadRetryReady){
if (this.loadRetryReady) {
this.retryholderFunc(imageKnifeData)
}
}
retryholderOnError(error){
LogUtil.log("重试占位图解析失败 error ="+ error)
retryholderOnError(error) {
LogUtil.log("重试占位图解析失败 error =" + error)
}
loadComplete(imageKnifeData: ImageKnifeData) {
@ -394,13 +436,13 @@ export class RequestOption {
}
loadError(err) {
LogUtil.log("loadError:"+err);
LogUtil.log("loadError stack=:"+JSON.stringify(err.stack));
LogUtil.log("loadError:" + err);
LogUtil.log("loadError stack=:" + JSON.stringify(err.stack));
// Display rules for the failure placeholder
if (this.retryholderFunc) {
// The retry layer takes precedence over the failure placeholder display
this.loadRetryReady = true;
if(this.retryholderData != null){
if (this.retryholderData != null) {
this.retryholderFunc(this.retryholderData)
}
} else {
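Taken together, the RequestOption additions above let a caller opt into the GPU pipeline and chain the new filters; a minimal sketch, with request standing for a RequestOption instance (the argument values are illustrative):
request.enableGPU()                                            // route transformations through gpu_transform
  .kuwaharaFilter(25)                                          // radius, forwarded to KuwaharaFilterTransform
  .toonFilter(0.2, 10.0)                                       // threshold, quantizationLevels
  .vignetteFilter([0.5, 0.5], [0.0, 0.0, 0.0], [0.3, 0.75]);   // center, RGB color, [start, end]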

View File

@ -71,7 +71,11 @@ export class BlurTransformation implements BaseTransform<PixelMap> {
}
imageSource.createPixelMap(options)
.then((data) => {
fastBlur.blur(data, this._mRadius, true, func);
if (request.gpuEnabled) {
fastBlur.blurGPU(data, this._mRadius, true, func);
} else {
fastBlur.blur(data, this._mRadius, true, func);
}
})
.catch((e) => {
LogUtil.log(Constants.PROJECT_TAG + ";error:" + e);

View File

@ -17,9 +17,9 @@ import { BaseTransform } from "../transform/BaseTransform"
import { AsyncTransform } from "../transform/AsyncTransform"
import { Constants } from "../constants/Constants"
import { RequestOption } from "../../imageknife/RequestOption"
import {LogUtil} from '../../imageknife/utils/LogUtil'
import { LogUtil } from '../../imageknife/utils/LogUtil'
import image from "@ohos.multimedia.image"
import { GPUImageBrightnessFilter } from '@ohos/gpu_transform'
/**
* brightness value ranges from -1.0 to 1.0, with 0.0 as the normal level
@ -78,6 +78,18 @@ export class BrightnessFilterTransformation implements BaseTransform<PixelMap> {
let bufferData = new ArrayBuffer(data.getPixelBytesNumber());
await data.readPixelsToBuffer(bufferData);
if (request.gpuEnabled) {
let filter = new GPUImageBrightnessFilter();
filter.setImageData(bufferData, targetWidth, targetHeight);
filter.setBrightness(this._mBrightness);
let buf = await filter.getPixelMapBuf(0, 0, targetWidth, targetHeight);
data.writeBufferToPixels(buf);
if (func) {
func("success", data);
}
return;
}
var dataArray = new Uint8Array(bufferData);
for (let index = 0; index < dataArray.length; index += 4) {

View File

@ -17,9 +17,9 @@ import { BaseTransform } from "../transform/BaseTransform"
import { AsyncTransform } from "../transform/AsyncTransform"
import { Constants } from "../constants/Constants"
import { RequestOption } from "../../imageknife/RequestOption"
import {LogUtil} from '../../imageknife/utils/LogUtil'
import { LogUtil } from '../../imageknife/utils/LogUtil'
import image from "@ohos.multimedia.image"
import { GPUImageContrastFilter } from '@ohos/gpu_transform'
/**
* Taking a 24-bit color image as an example, each color channel can be expressed in the range 0-255
@ -91,6 +91,18 @@ export class ContrastFilterTransformation implements BaseTransform<PixelMap> {
let bufferData = new ArrayBuffer(data.getPixelBytesNumber());
await data.readPixelsToBuffer(bufferData);
if (request.gpuEnabled) {
let filter = new GPUImageContrastFilter();
filter.setImageData(bufferData, targetWidth, targetHeight);
filter.setContrast(this._mContrast)
let buf = await filter.getPixelMapBuf(0, 0, targetWidth, targetHeight);
data.writeBufferToPixels(buf);
if (func) {
func("success", data);
}
return;
}
var dataArray = new Uint8Array(bufferData);
let brightness = 0; // brightness offset; 0 by default

View File

@ -20,6 +20,7 @@ import { RequestOption } from "../../imageknife/RequestOption"
import { TransformUtils } from "../transform/TransformUtils"
import {LogUtil} from '../../imageknife/utils/LogUtil'
import image from "@ohos.multimedia.image"
import { GPUImageGrayscaleFilter } from '@ohos/gpu_transform'
export class GrayscaleTransformation implements BaseTransform<PixelMap> {
getName() {
@ -70,6 +71,18 @@ export class GrayscaleTransformation implements BaseTransform<PixelMap> {
let bufferNewData = new ArrayBuffer(data.getPixelBytesNumber());
await data.readPixelsToBuffer(bufferData);
if (request.gpuEnabled) {
let filter = new GPUImageGrayscaleFilter();
filter.setImageData(bufferData, targetWidth, targetHeight);
let buf = await filter.getPixelMapBuf(0, 0, targetWidth, targetHeight);
data.writeBufferToPixels(buf);
if (func) {
func("success", data);
}
return;
}
var dataArray = new Uint8Array(bufferData);
var dataNewArray = new Uint8Array(bufferNewData);

View File

@ -19,6 +19,7 @@ import { Constants } from "../constants/Constants"
import { RequestOption } from "../../imageknife/RequestOption"
import {LogUtil} from '../../imageknife/utils/LogUtil'
import image from "@ohos.multimedia.image"
import { GPUImageColorInvertFilter } from '@ohos/gpu_transform'
/**
** Image inversion is particularly useful for enhancing white or gray detail in
@ -78,6 +79,18 @@ export class InvertFilterTransformation implements BaseTransform<PixelMap> {
let bufferData = new ArrayBuffer(data.getPixelBytesNumber());
await data.readPixelsToBuffer(bufferData);
if (request.gpuEnabled) {
let filter = new GPUImageColorInvertFilter();
filter.setImageData(bufferData, targetWidth, targetHeight);
let buf = await filter.getPixelMapBuf(0, 0, targetWidth, targetHeight);
data.writeBufferToPixels(buf);
if (func) {
func("success", data);
}
return;
}
var dataArray = new Uint8Array(bufferData);
for (let index = 0; index < dataArray.length; index += 4) {

View File

@ -0,0 +1,103 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { BaseTransform } from "../transform/BaseTransform"
import { AsyncTransform } from "../transform/AsyncTransform"
import { Constants } from "../constants/Constants"
import { RequestOption } from "../../imageknife/RequestOption"
import { TransformUtils } from "../transform/TransformUtils"
import image from "@ohos.multimedia.image"
import { LogUtil } from '../../imageknife/utils/LogUtil'
import { GPUImageKuwaharaFilter } from '@ohos/gpu_transform'
export class KuwaharaFilterTransform implements BaseTransform<PixelMap> {
private _mRadius: number;
constructor(radius: number) {
this._mRadius = radius;
}
getName() {
return "KuwaharaFilterTransform _mRadius:" + this._mRadius;
}
transform(buf: ArrayBuffer, request: RequestOption, func?: AsyncTransform<PixelMap>) {
if (!buf || buf.byteLength <= 0) {
LogUtil.log(Constants.PROJECT_TAG + ";KuwaharaFilterTransform buf is empty");
if (func) {
func(Constants.PROJECT_TAG + ";KuwaharaFilterTransform buf is empty", null);
}
return;
}
if (!request.gpuEnabled) {
LogUtil.error(Constants.PROJECT_TAG + ";the KuwaharaFilterTransform supported only in GPU mode");
if (func) {
func(Constants.PROJECT_TAG + ";;the KuwaharaFilterTransform supported only in GPU mode", null);
}
return;
}
var that = this;
var imageSource = image.createImageSource(buf as any);
TransformUtils.getPixelMapSize(imageSource, (error, size: {
width: number,
height: number
}) => {
if (!size) {
func(error, null)
return;
}
var pixelMapWidth = size.width;
var pixelMapHeight = size.height;
var targetWidth = request.size.width;
var targetHeight = request.size.height;
if (pixelMapWidth < targetWidth) {
targetWidth = pixelMapWidth;
}
if (pixelMapHeight < targetHeight) {
targetHeight = pixelMapHeight;
}
var options = {
editable: true,
desiredSize: {
width: targetWidth,
height: targetHeight
}
}
imageSource.createPixelMap(options)
.then((data) => {
that.kuwahara(data, targetWidth, targetHeight, func);
})
.catch((e) => {
LogUtil.log(Constants.PROJECT_TAG + ";error:" + e);
func(e, null);
})
})
}
private async kuwahara(bitmap: image.PixelMap, targetWidth: number, targetHeight: number, func: AsyncTransform<PixelMap>) {
let bufferData = new ArrayBuffer(bitmap.getPixelBytesNumber());
await bitmap.readPixelsToBuffer(bufferData);
let filter = new GPUImageKuwaharaFilter();
filter.setImageData(bufferData, targetWidth, targetHeight);
filter.setRadius(this._mRadius);
let buf = await filter.getPixelMapBuf(0, 0, targetWidth, targetHeight)
bitmap.writeBufferToPixels(buf);
if (func) {
func("success", bitmap);
}
}
}

View File

@ -18,7 +18,7 @@ import { AsyncTransform } from "../transform/AsyncTransform"
import { Constants } from "../constants/Constants"
import { RequestOption } from "../../imageknife/RequestOption"
import { TransformUtils } from "../transform/TransformUtils"
import {LogUtil} from '../../imageknife/utils/LogUtil'
import { LogUtil } from '../../imageknife/utils/LogUtil'
import image from "@ohos.multimedia.image"
import { pixelUtils } from "../utils/PixelUtils"
@ -76,7 +76,11 @@ export class PixelationFilterTransformation implements BaseTransform<PixelMap> {
}
imageSource.createPixelMap(options)
.then((data) => {
pixelUtils.pixel(data, this._mPixel, func);
if (request.gpuEnabled) {
pixelUtils.pixelGPU(data, this._mPixel, func);
} else {
pixelUtils.pixel(data, this._mPixel, func);
}
})
.catch((e) => {
LogUtil.log(Constants.PROJECT_TAG + ";error:" + e);

View File

@ -17,8 +17,9 @@ import { BaseTransform } from "../transform/BaseTransform"
import { AsyncTransform } from "../transform/AsyncTransform"
import { Constants } from "../constants/Constants"
import { RequestOption } from "../../imageknife/RequestOption"
import {LogUtil} from '../../imageknife/utils/LogUtil'
import { LogUtil } from '../../imageknife/utils/LogUtil'
import image from "@ohos.multimedia.image"
import { GPUImageSepiaToneFilter } from '@ohos/gpu_transform'
/**
* Applies a simple sepia effect.
@ -72,9 +73,20 @@ export class SepiaFilterTransformation implements BaseTransform<PixelMap> {
let data = await imageSource.createPixelMap(options);
let bufferData = new ArrayBuffer(data.getPixelBytesNumber());
let bufferNewData = new ArrayBuffer(data.getPixelBytesNumber());
await data.readPixelsToBuffer(bufferData);
if (request.gpuEnabled) {
let filter = new GPUImageSepiaToneFilter();
filter.setImageData(bufferData, targetWidth, targetHeight);
let buf = await filter.getPixelMapBuf(0, 0, targetWidth, targetHeight);
data.writeBufferToPixels(buf);
if (func) {
func("success", data);
}
return;
}
let bufferNewData = new ArrayBuffer(data.getPixelBytesNumber());
var dataArray = new Uint8Array(bufferData);
var dataNewArray = new Uint8Array(bufferNewData);

View File

@ -63,7 +63,11 @@ export class SketchFilterTransformation implements BaseTransform<PixelMap> {
}
imageSource.createPixelMap(options)
.then((data) => {
CalculatePixelUtils.sketch(data, func);
if (request.gpuEnabled) {
CalculatePixelUtils.sketchGpu(data, func);
} else {
CalculatePixelUtils.sketch(data, func);
}
})
.catch((e) => {
func(e, null);

View File

@ -22,16 +22,27 @@ import image from '@ohos.multimedia.image'
import { PixelEntry } from '../entry/PixelEntry'
import { ColorUtils } from '../utils/ColorUtils'
import { CalculatePixelUtils } from '../utils/CalculatePixelUtils'
import { GPUImageSwirlFilter } from '@ohos/gpu_transform'
export class SwirlFilterTransformation implements BaseTransform<PixelMap> {
private _degree: number;
private radius: number = 0;
private _angle: number = 0.9;
private _xCenter: number = 0.5;
private _yCenter: number = 0.5;
constructor(degree: number) {
this._degree = degree;
constructor(radius: number, angle?: number, centerPoint?: Array<number>) {
this.radius = radius;
if (angle) {
this._angle = angle;
}
if (centerPoint && centerPoint.length === 2) {
this._xCenter = centerPoint[0];
this._yCenter = centerPoint[1];
}
}
getName() {
return 'SwirlFilterTransformation' + this._degree;
return 'SwirlFilterTransformation' + this.radius;
}
transform(buf: ArrayBuffer, request: RequestOption, func?: AsyncTransform<PixelMap>) {
@ -71,7 +82,7 @@ export class SwirlFilterTransformation implements BaseTransform<PixelMap> {
}
imageSource.createPixelMap(options)
.then((data) => {
this.swirl(data, this._degree, func);
this.swirl(data, this.radius, request, func);
})
.catch((e) => {
func(e, null);
@ -79,7 +90,7 @@ export class SwirlFilterTransformation implements BaseTransform<PixelMap> {
})
}
private async swirl(bitmap: any, degree: number, func?: AsyncTransform<PixelMap>) {
private async swirl(bitmap: image.PixelMap, degree: number, request: RequestOption, func?: AsyncTransform<PixelMap>) {
let imageInfo = await bitmap.getImageInfo();
let size = {
width: imageInfo.size.width,
@ -90,13 +101,28 @@ export class SwirlFilterTransformation implements BaseTransform<PixelMap> {
}
let width = size.width;
let height = size.height;
let pixEntry: Array<PixelEntry> = new Array();
let rgbData = CalculatePixelUtils.createInt2DArray(height, width);
let bufferData = new ArrayBuffer(bitmap.getPixelBytesNumber());
await bitmap.readPixelsToBuffer(bufferData);
if (request.gpuEnabled) {
let filter = new GPUImageSwirlFilter();
filter.setImageData(bufferData, width, height);
filter.setRadius(degree);
filter.setAngle(this._angle)
filter.setCenter(this._xCenter, this._yCenter)
let buf = await filter.getPixelMapBuf(0, 0, width, height);
bitmap.writeBufferToPixels(buf);
if (func) {
func("success", bitmap);
}
return;
}
let pixEntry: Array<PixelEntry> = new Array();
let rgbData = CalculatePixelUtils.createInt2DArray(height, width);
let dataArray = new Uint8Array(bufferData);
let ph = 0;
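With the widened constructor above, existing radius-only call sites keep working while GPU requests can also tune the angle and center; a brief sketch (the values are illustrative):
import { SwirlFilterTransformation } from '@ohos/imageknife'
let swirlDefault = new SwirlFilterTransformation(80);                  // radius only; angle 0.9 and center [0.5, 0.5] by default
let swirlTuned = new SwirlFilterTransformation(80, 1.2, [0.4, 0.6]);   // radius, angle, [xCenter, yCenter]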

View File

@ -0,0 +1,110 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { BaseTransform } from "../transform/BaseTransform"
import { AsyncTransform } from "../transform/AsyncTransform"
import { Constants } from "../constants/Constants"
import { RequestOption } from "../../imageknife/RequestOption"
import { TransformUtils } from "../transform/TransformUtils"
import image from "@ohos.multimedia.image"
import { LogUtil } from '../../imageknife/utils/LogUtil'
import { GPUImageToonFilter } from '@ohos/gpu_transform'
export class ToonFilterTransform implements BaseTransform<PixelMap> {
private threshold: number = 0.2;
private quantizationLevels: number = 10.0;
constructor(threshold?: number, quantizationLevels?: number) {
if (threshold) {
this.threshold = threshold;
}
if (quantizationLevels) {
this.quantizationLevels = quantizationLevels;
}
}
getName() {
return "ToonFilterTransform threshold:" + this.threshold + ";quantizationLevels:" + this.quantizationLevels;
}
transform(buf: ArrayBuffer, request: RequestOption, func?: AsyncTransform<PixelMap>) {
if (!buf || buf.byteLength <= 0) {
LogUtil.log(Constants.PROJECT_TAG + ";ToonFilterTransform buf is empty");
if (func) {
func(Constants.PROJECT_TAG + ";ToonFilterTransform buf is empty", null);
}
return;
}
if (!request.gpuEnabled) {
LogUtil.error(Constants.PROJECT_TAG + ";the ToonFilterTransform supported only in GPU mode");
if (func) {
func(Constants.PROJECT_TAG + ";the ToonFilterTransform supported only in GPU mode", null);
}
return;
}
var that = this;
var imageSource = image.createImageSource(buf as any);
TransformUtils.getPixelMapSize(imageSource, (error, size: {
width: number,
height: number
}) => {
if (!size) {
func(error, null)
return;
}
var pixelMapWidth = size.width;
var pixelMapHeight = size.height;
var targetWidth = request.size.width;
var targetHeight = request.size.height;
if (pixelMapWidth < targetWidth) {
targetWidth = pixelMapWidth;
}
if (pixelMapHeight < targetHeight) {
targetHeight = pixelMapHeight;
}
var options = {
editable: true,
desiredSize: {
width: targetWidth,
height: targetHeight
}
}
imageSource.createPixelMap(options)
.then((data) => {
that.toon(data, targetWidth, targetHeight, func);
})
.catch((e) => {
LogUtil.log(Constants.PROJECT_TAG + ";error:" + e);
func(e, null);
})
})
}
private async toon(bitmap: image.PixelMap, targetWidth: number, targetHeight: number, func: AsyncTransform<PixelMap>) {
let bufferData = new ArrayBuffer(bitmap.getPixelBytesNumber());
await bitmap.readPixelsToBuffer(bufferData);
let filter = new GPUImageToonFilter();
filter.setImageData(bufferData, targetWidth, targetHeight);
filter.setThreshold(this.threshold);
filter.setQuantizationLevels(this.quantizationLevels);
let buf = await filter.getPixelMapBuf(0, 0, targetWidth, targetHeight)
bitmap.writeBufferToPixels(buf);
if (func) {
func("success", bitmap);
}
}
}

View File

@ -0,0 +1,116 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { BaseTransform } from "../transform/BaseTransform"
import { AsyncTransform } from "../transform/AsyncTransform"
import { Constants } from "../constants/Constants"
import { RequestOption } from "../../imageknife/RequestOption"
import { TransformUtils } from "../transform/TransformUtils"
import image from "@ohos.multimedia.image"
import { LogUtil } from '../../imageknife/utils/LogUtil'
import { GPUImageVignetterFilter } from '@ohos/gpu_transform'
export class VignetteFilterTransform implements BaseTransform<PixelMap> {
private centerPoint: Array<number> = [0.5, 0.5];
private vignetteColor: Array<number> = [0.0, 0.0, 0.0];
private vignetteSpace: Array<number> = [0.3, 0.75];
constructor(centerPoint: Array<number>, vignetteColor: Array<number>, vignetteSpace: Array<number>) {
if (centerPoint.length === 2) {
this.centerPoint = centerPoint;
}
if (vignetteColor.length === 3) {
this.vignetteColor = vignetteColor;
}
if (vignetteSpace.length === 2) {
this.vignetteSpace = vignetteSpace;
}
}
getName() {
return "VignetteFilterTransform centerPoint:" + this.centerPoint + ";vignetteColor:" + this.vignetteColor + ";vignetteSpace:" + this.vignetteSpace;
}
transform(buf: ArrayBuffer, request: RequestOption, func?: AsyncTransform<PixelMap>) {
if (!buf || buf.byteLength <= 0) {
LogUtil.log(Constants.PROJECT_TAG + ";VignetteFilterTransform buf is empty");
if (func) {
func(Constants.PROJECT_TAG + ";VignetteFilterTransform buf is empty", null);
}
return;
}
if (!request.gpuEnabled) {
LogUtil.error(Constants.PROJECT_TAG + ";the VignetteFilterTransform supported only in GPU mode");
if (func) {
func(Constants.PROJECT_TAG + ";the VignetteFilterTransform supported only in GPU mode", null);
}
return;
}
var that = this;
var imageSource = image.createImageSource(buf as any);
TransformUtils.getPixelMapSize(imageSource, (error, size: {
width: number,
height: number
}) => {
if (!size) {
func(error, null)
return;
}
var pixelMapWidth = size.width;
var pixelMapHeight = size.height;
var targetWidth = request.size.width;
var targetHeight = request.size.height;
if (pixelMapWidth < targetWidth) {
targetWidth = pixelMapWidth;
}
if (pixelMapHeight < targetHeight) {
targetHeight = pixelMapHeight;
}
var options = {
editable: true,
desiredSize: {
width: targetWidth,
height: targetHeight
}
}
imageSource.createPixelMap(options)
.then((data) => {
that.vignette(data, targetWidth, targetHeight, func);
})
.catch((e) => {
LogUtil.log(Constants.PROJECT_TAG + ";error:" + e);
func(e, null);
})
})
}
private async vignette(bitmap: image.PixelMap, targetWidth: number, targetHeight: number, func: AsyncTransform<PixelMap>) {
let bufferData = new ArrayBuffer(bitmap.getPixelBytesNumber());
await bitmap.readPixelsToBuffer(bufferData);
let filter = new GPUImageVignetterFilter();
filter.setImageData(bufferData, targetWidth, targetHeight);
filter.setVignetteCenter(this.centerPoint);
filter.setVignetteColor(this.vignetteColor);
filter.setVignetteStart(this.vignetteSpace[0]);
filter.setVignetteEnd(this.vignetteSpace[1]);
let buf = await filter.getPixelMapBuf(0, 0, targetWidth, targetHeight)
bitmap.writeBufferToPixels(buf);
if (func) {
func("success", bitmap);
}
}
}
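As the setters show, the three constructor arrays map onto the GPU uniforms: centerPoint becomes vignetteCenter, vignetteColor the RGB tint, and vignetteSpace the start and end values; a brief sketch of attaching the transform to a GPU-enabled request (values are illustrative):
let vignette = new VignetteFilterTransform([0.5, 0.5], [0.0, 0.0, 0.0], [0.3, 0.75]);
request.transform(vignette);   // the request must have enableGPU() set, otherwise this transform reports an error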

View File

@ -12,9 +12,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {PixelEntry} from "../entry/PixelEntry"
import {AsyncTransform} from "../transform/AsyncTransform"
import {ColorUtils} from "./ColorUtils"
import { PixelEntry } from "../entry/PixelEntry"
import { AsyncTransform } from "../transform/AsyncTransform"
import { ColorUtils } from "./ColorUtils"
import { GPUImageSketchFilter } from '@ohos/gpu_transform'
export namespace CalculatePixelUtils {
export async function sketch(p: any, func: AsyncTransform<PixelMap>) {
@ -96,7 +97,7 @@ export namespace CalculatePixelUtils {
}
var gaussGray = (psrc: Array<number>, horz: number, vert: number,
width: number, height: number): number=> {
width: number, height: number): number => {
let dst, src, n_p, n_m, d_p, d_m, bd_p, bd_m, val_p, val_m, initial_p, initial_m: Array<number>;
let i, j, t, k, row, col, terms, std_dev, sp_p_idx, sp_m_idx, vp_idx, vm_idx: number;
let row_stride = width;
@ -204,8 +205,8 @@ export namespace CalculatePixelUtils {
}
var findConstants = (n_p: Array<number>, n_m: Array<number>, d_p: Array<number>,
d_m: Array<number>, bd_p: Array<number>
, bd_m: Array<number>, std_dev: number)=> {
d_m: Array<number>, bd_p: Array<number>
, bd_m: Array<number>, std_dev: number) => {
let div = Math.sqrt(2 * 3.141593) * std_dev;
let x0 = -1.783 / std_dev;
let x1 = -1.723 / std_dev;
@ -263,16 +264,16 @@ export namespace CalculatePixelUtils {
}
var transferGaussPixels = (src1: Array<number>, src2: Array<number>,
dest: Array<number>, bytes: number, width: number)=> {
dest: Array<number>, bytes: number, width: number) => {
let i, j, k, b, sum: number;
let bend = bytes * width;
i = j = k = 0;
for (b = 0; b < bend; b++) {
sum = src1[i++] + src2[j++];
if (sum > 255)
sum = 255;
sum = 255;
else if (sum < 0)
sum = 0;
sum = 0;
dest[k++] = sum;
}
}
@ -296,4 +297,30 @@ export namespace CalculatePixelUtils {
}
return array;
}
export async function sketchGpu(p: any, func: AsyncTransform<PixelMap>) {
let imageInfo = await p.getImageInfo();
let size = {
width: imageInfo.size.width,
height: imageInfo.size.height
}
if (!size) {
func(new Error("sketch The image size does not exist."), null)
return;
}
let w = size.width;
let h = size.height;
let bufferData = new ArrayBuffer(p.getPixelBytesNumber());
await p.readPixelsToBuffer(bufferData);
let filter = new GPUImageSketchFilter();
filter.setImageData(bufferData, w, h);
filter.getPixelMapBuf(0, 0, w, h).then((buf) => {
p.writeBufferToPixels(buf);
if (func) {
func("success", p);
}
})
}
}

View File

@ -17,7 +17,7 @@ import {CalculatePixelUtils} from "./CalculatePixelUtils"
import {PixelEntry} from "../entry/PixelEntry"
import {AsyncTransform} from "../transform/AsyncTransform"
import {ColorUtils} from "./ColorUtils"
import { GPUImageBlurFilter } from '@ohos/gpu_transform'
export namespace fastBlur {
@ -290,4 +290,37 @@ export namespace fastBlur {
func("success", bitmap);
}
}
export async function blurGPU(bitmap: any, radius: number, canReuseInBitmap: boolean, func: AsyncTransform<PixelMap>) {
if (radius < 1) {
func("error,radius must be greater than 1 ", null);
return;
}
let imageInfo = await bitmap.getImageInfo();
let size = {
width: imageInfo.size.width,
height: imageInfo.size.height
}
if (!size) {
func(new Error("fastBlur The image size does not exist."), null)
return;
}
let w = size.width;
let h = size.height;
let bufferData = new ArrayBuffer(bitmap.getPixelBytesNumber());
await bitmap.readPixelsToBuffer(bufferData);
let filter = new GPUImageBlurFilter();
filter.setImageData(bufferData, w, h);
filter.setBlurRadius(radius);
filter.setBlurOffset([1.0, 1.0])
filter.getPixelMapBuf(0, 0, w, h).then((buf) => {
bitmap.writeBufferToPixels(buf);
if (func) {
func("success", bitmap);
}
})
}
}
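blurGPU keeps the same call shape as the CPU blur that BlurTransformation switches between earlier in this change; a brief sketch, with pm standing for an image.PixelMap:
fastBlur.blurGPU(pm, 15, true, (status, blurred) => {
  if (blurred) {
    // blurred is the same PixelMap, with the GPU result written back into it
  }
});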

View File

@ -17,6 +17,7 @@ import {CalculatePixelUtils} from "./CalculatePixelUtils"
import {PixelEntry} from "../entry/PixelEntry"
import {AsyncTransform} from "../transform/AsyncTransform"
import {ColorUtils} from "./ColorUtils"
import {GPUImagePixelationFilter} from '@ohos/gpu_transform'
export namespace pixelUtils {
@ -129,4 +130,30 @@ export namespace pixelUtils {
func("success", bitmap);
}
}
export async function pixelGPU(bitmap: any, pixel: number, func: AsyncTransform<PixelMap>) {
let imageInfo = await bitmap.getImageInfo();
let size = {
width: imageInfo.size.width,
height: imageInfo.size.height
}
if (!size) {
func(new Error("GrayscaleTransformation The image size does not exist."), null)
return;
}
let w = size.width;
let h = size.height;
let bufferData = new ArrayBuffer(bitmap.getPixelBytesNumber());
await bitmap.readPixelsToBuffer(bufferData);
let filter = new GPUImagePixelationFilter();
filter.setImageData(bufferData, w, h);
filter.setPixel(pixel)
filter.getPixelMapBuf(0, 0, w, h).then((buf) => {
bitmap.writeBufferToPixels(buf);
if (func) {
func("success", bitmap);
}
})
}
}

View File

@ -1,19 +1,19 @@
{
"license": "ISC",
"devDependencies": {},
"name": "imageknife",
"ohos": {
"org": "huawei",
"directoryLevel": "project",
"buildTool": "hvigor"
"license":"ISC",
"devDependencies":{},
"name":"imageknife",
"ohos":{
"org":"huawei",
"directoryLevel":"project",
"buildTool":"hvigor"
},
"description": "example description",
"repository": {},
"version": "1.0.0",
"dependencies": {
"@ohos/hypium": "1.0.3",
"@ohos/hvigor-ohos-plugin": "1.3.1",
"hypium": "^1.0.0",
"@ohos/hvigor": "1.3.1"
"description":"example description",
"repository":{},
"version":"1.0.0",
"dependencies":{
"@ohos/hypium":"1.0.3",
"@ohos/hvigor-ohos-plugin":"1.3.1",
"hypium":"^1.0.0",
"@ohos/hvigor":"1.3.1"
}
}