add gpu transform module

Signed-off-by: tyBrave <tianyong13@huawei.com>
This commit is contained in:
tyBrave 2023-02-02 16:59:13 +08:00
parent 01c960daba
commit 75f08250a2
63 changed files with 4207 additions and 182 deletions

View File

@ -7,7 +7,7 @@
"name": "default",
"signingConfig": "default"
}
],
]
},
"modules": [
{
@ -25,6 +25,10 @@
{
"name": "imageknife",
"srcPath": "./imageknife"
},
{
"name": "gpu_transform",
"srcPath": "./gpu_transform"
}
]
}

View File

@ -22,8 +22,9 @@ import { CropCallback } from '@ohos/imageknife'
import { FileUtils } from '@ohos/imageknife'
@Component
@Entry
@Component
export struct CropImagePage2 {
@State options1: PixelMapCrop.Options = new PixelMapCrop.Options();
@State cropTap: boolean = false;

View File

@ -27,11 +27,11 @@ import { BrightnessFilterTransformation } from '@ohos/imageknife'
import { ContrastFilterTransformation } from '@ohos/imageknife'
import { InvertFilterTransformation } from '@ohos/imageknife'
import { SepiaFilterTransformation } from '@ohos/imageknife'
import {SketchFilterTransformation} from '@ohos/imageknife'
import {BlurTransformation} from '@ohos/imageknife'
import {PixelationFilterTransformation} from '@ohos/imageknife'
import {MaskTransformation} from '@ohos/imageknife'
import {SwirlFilterTransformation} from '@ohos/imageknife'
import { SketchFilterTransformation } from '@ohos/imageknife'
import { BlurTransformation } from '@ohos/imageknife'
import { PixelationFilterTransformation } from '@ohos/imageknife'
import { MaskTransformation } from '@ohos/imageknife'
import { SwirlFilterTransformation } from '@ohos/imageknife'
/**
@ -44,26 +44,29 @@ let mUrl = $r('app.media.pngSample');
@Entry
@Component
struct TransformPixelMapPage {
@State url: string= "";
@State mCropPixelMap: PixelMap= undefined;
@State mRoundPixelMap: PixelMap= undefined;
@State mCirclePixelMap: PixelMap= undefined;
@State mCircleBorderPixelMap: PixelMap= undefined;
@State mRotatePixelMap: PixelMap= undefined;
@State mSquarePixelMap: PixelMap= undefined;
@State mClipTopPixelMap: PixelMap= undefined;
@State mClipCenterPixelMap: PixelMap= undefined;
@State mClipBottomPixelMap: PixelMap= undefined;
@State mGrayscalePixelMap: PixelMap= undefined;
@State mBrightnessPixelMap: PixelMap= undefined;
@State mContrastPixelMap: PixelMap= undefined;
@State mInvertPixelMap: PixelMap= undefined;
@State mSepiaPixelMap: PixelMap= undefined;
@State mSketchPixelMap: PixelMap= undefined;
@State mBlurPixelMap: PixelMap= undefined;
@State mPixelPixelMap: PixelMap= undefined;
@State mSwirlPixelMap: PixelMap= undefined;
@State mMaskPixelMap: PixelMap= undefined;
@State url: string = "";
@State mCropPixelMap: PixelMap = undefined;
@State mRoundPixelMap: PixelMap = undefined;
@State mCirclePixelMap: PixelMap = undefined;
@State mCircleBorderPixelMap: PixelMap = undefined;
@State mRotatePixelMap: PixelMap = undefined;
@State mSquarePixelMap: PixelMap = undefined;
@State mClipTopPixelMap: PixelMap = undefined;
@State mClipCenterPixelMap: PixelMap = undefined;
@State mClipBottomPixelMap: PixelMap = undefined;
@State mGrayscalePixelMap: PixelMap = undefined;
@State mBrightnessPixelMap: PixelMap = undefined;
@State mContrastPixelMap: PixelMap = undefined;
@State mInvertPixelMap: PixelMap = undefined;
@State mSepiaPixelMap: PixelMap = undefined;
@State mSketchPixelMap: PixelMap = undefined;
@State mBlurPixelMap: PixelMap = undefined;
@State mPixelPixelMap: PixelMap = undefined;
@State mSwirlPixelMap: PixelMap = undefined;
@State mMaskPixelMap: PixelMap = undefined;
@State mKuwaharaPixelMap: PixelMap = undefined;
@State mToonPixelMap: PixelMap = undefined;
@State mVignettePixelMap: PixelMap = undefined;
build() {
Flex({ direction: FlexDirection.Column, alignItems: ItemAlign.Center }) {
@ -98,7 +101,7 @@ struct TransformPixelMapPage {
});
}.margin({ top: 10 })
Image(this.mCropPixelMap )
Image(this.mCropPixelMap)
.objectFit(ImageFit.None)
.width(100)
.height(100)
@ -151,7 +154,7 @@ struct TransformPixelMapPage {
});
}.margin({ top: 10 })
Image(this.mRoundPixelMap )
Image(this.mRoundPixelMap)
.objectFit(ImageFit.Fill)
.width(100)
.height(100)
@ -170,7 +173,7 @@ struct TransformPixelMapPage {
.onClick(() => {
this.circleTransformation();
});
Image(this.mCirclePixelMap )
Image(this.mCirclePixelMap)
.width(200)
.height(200)
.margin({ top: 10 })
@ -188,7 +191,7 @@ struct TransformPixelMapPage {
.onClick(() => {
this.circleBorderTransformation(5);
});
Image(this.mCircleBorderPixelMap )
Image(this.mCircleBorderPixelMap)
.width(200)
.height(200)
.margin({ top: 10 })
@ -210,7 +213,7 @@ struct TransformPixelMapPage {
}
this.transformRotate(mRotate);
});
Image(this.mRotatePixelMap )
Image(this.mRotatePixelMap)
.width(200)
.height(200)
.margin({ top: 10 })
@ -228,7 +231,7 @@ struct TransformPixelMapPage {
.onClick(() => {
this.transformSquare();
});
Image(this.mSquarePixelMap )
Image(this.mSquarePixelMap)
.objectFit(ImageFit.Fill)
.width(200)
.height(200)
@ -266,17 +269,17 @@ struct TransformPixelMapPage {
}.margin({ top: 10 })
Row({ space: 1 }) {
Image(this.mClipTopPixelMap )
Image(this.mClipTopPixelMap)
.objectFit(ImageFit.Fill)
.width(100)
.height(100)
.margin({ top: 10 })
Image(this.mClipCenterPixelMap )
Image(this.mClipCenterPixelMap)
.objectFit(ImageFit.Fill)
.width(100)
.height(100)
.margin({ top: 10 })
Image(this.mClipBottomPixelMap )
Image(this.mClipBottomPixelMap)
.objectFit(ImageFit.Fill)
.width(100)
.height(100)
@ -295,7 +298,7 @@ struct TransformPixelMapPage {
.onClick(() => {
this.grayscalePixelMap();
});
Image(this.mGrayscalePixelMap )
Image(this.mGrayscalePixelMap)
.objectFit(ImageFit.Fill)
.width(200)
.height(200)
@ -313,7 +316,7 @@ struct TransformPixelMapPage {
.onClick(() => {
this.brightnessPixelMap(0.8);
});
Image(this.mBrightnessPixelMap )
Image(this.mBrightnessPixelMap)
.objectFit(ImageFit.Fill)
.width(200)
.height(200)
@ -331,7 +334,7 @@ struct TransformPixelMapPage {
.onClick(() => {
this.contrastPixelMap(4);
});
Image(this.mContrastPixelMap )
Image(this.mContrastPixelMap)
.objectFit(ImageFit.Fill)
.width(200)
.height(200)
@ -349,7 +352,7 @@ struct TransformPixelMapPage {
.onClick(() => {
this.invertPixelMap();
});
Image(this.mInvertPixelMap )
Image(this.mInvertPixelMap)
.objectFit(ImageFit.Fill)
.width(200)
.height(200)
@ -368,7 +371,7 @@ struct TransformPixelMapPage {
.onClick(() => {
this.sepiaPixelMap();
});
Image(this.mSepiaPixelMap )
Image(this.mSepiaPixelMap)
.objectFit(ImageFit.Fill)
.width(200)
.height(200)
@ -386,7 +389,7 @@ struct TransformPixelMapPage {
.onClick(() => {
this.sketchPixelMap();
});
Image(this.mSketchPixelMap )
Image(this.mSketchPixelMap)
.objectFit(ImageFit.Fill)
.width(200)
.height(200)
@ -405,7 +408,7 @@ struct TransformPixelMapPage {
.onClick(() => {
this.blurHandlePixelMap(20);
});
Image(this.mBlurPixelMap )
Image(this.mBlurPixelMap)
.objectFit(ImageFit.Fill)
.width(200)
.height(200)
@ -425,7 +428,7 @@ struct TransformPixelMapPage {
.onClick(() => {
this.pixelHandlePixelMap(20);
});
Image(this.mPixelPixelMap )
Image(this.mPixelPixelMap)
.objectFit(ImageFit.Fill)
.width(200)
.height(200)
@ -444,7 +447,7 @@ struct TransformPixelMapPage {
.onClick(() => {
this.swirlHandlePixelMap();
});
Image(this.mSwirlPixelMap )
Image(this.mSwirlPixelMap)
.objectFit(ImageFit.Fill)
.width(200)
.height(200)
@ -463,7 +466,64 @@ struct TransformPixelMapPage {
.onClick(() => {
this.maskHandlePixelMap($r('app.media.mask_starfish'));
});
Image(this.mMaskPixelMap )
Image(this.mMaskPixelMap)
.objectFit(ImageFit.Fill)
.width(200)
.height(200)
.margin({ top: 10 })
}.margin({ top: 10 });
Column() {
Text("KuwaharaFilterTransform").fontColor(Color.Gray).fontSize(16);
Button() {
Text("图片kuwahara").fontSize(13).fontColor(Color.White)
}
.height(35)
.width(120)
.margin({ top: 10 })
.onClick(() => {
this.kuwaharaHandlePixelMap();
});
Image(this.mKuwaharaPixelMap)
.objectFit(ImageFit.Fill)
.width(200)
.height(200)
.margin({ top: 10 })
}.margin({ top: 10 });
Column() {
Text("ToonFilterTransform").fontColor(Color.Gray).fontSize(16);
Button() {
Text("图片toon").fontSize(13).fontColor(Color.White)
}
.height(35)
.width(120)
.margin({ top: 10 })
.onClick(() => {
this.toonHandlePixelMap();
});
Image(this.mToonPixelMap)
.objectFit(ImageFit.Fill)
.width(200)
.height(200)
.margin({ top: 10 })
}.margin({ top: 10 });
Column() {
Text("VignetteFilterTransform").fontColor(Color.Gray).fontSize(16);
Button() {
Text("图片vignette").fontSize(13).fontColor(Color.White)
}
.height(35)
.width(120)
.margin({ top: 10 })
.onClick(() => {
this.vignetteHandlePixelMap();
});
Image(this.mVignettePixelMap)
.objectFit(ImageFit.Fill)
.width(200)
.height(200)
@ -481,19 +541,19 @@ struct TransformPixelMapPage {
}
/**
/**
* centerCrop
*/
centerCrop() {
var imageKnifeOption = new RequestOption();
imageKnifeOption.load($r('app.media.jpgSample'))
// imageKnifeOption.load(mUrl)
// imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
this.mCropPixelMap = result;
setTimeout(() => {
let result2 = undefined;
result2 = data.drawPixelMap.imagePixelMap as PixelMap;
result2 = data.drawPixelMap.imagePixelMap as PixelMap;
this.mCropPixelMap = result2;
}, 100)
return false;
@ -504,7 +564,7 @@ struct TransformPixelMapPage {
ImageKnife.call(imageKnifeOption);
}
/**
/**
* centerInside
*/
centerInside() {
@ -515,7 +575,7 @@ struct TransformPixelMapPage {
this.mCropPixelMap = result;
setTimeout(() => {
let result2 = undefined;
result2 = data.drawPixelMap.imagePixelMap as PixelMap;
result2 = data.drawPixelMap.imagePixelMap as PixelMap;
this.mCropPixelMap = result2;
}, 100)
return false;
@ -526,7 +586,7 @@ struct TransformPixelMapPage {
ImageKnife.call(imageKnifeOption);
}
/**
/**
* fitCenter
*/
fitCenter() {
@ -537,7 +597,7 @@ struct TransformPixelMapPage {
this.mCropPixelMap = result;
setTimeout(() => {
let result2 = undefined;
result2 = data.drawPixelMap.imagePixelMap as PixelMap;
result2 = data.drawPixelMap.imagePixelMap as PixelMap;
this.mCropPixelMap = result2;
}, 100)
return false;
@ -547,11 +607,11 @@ struct TransformPixelMapPage {
.fitCenter();
ImageKnife.call(imageKnifeOption);
}
/**
/**
* rounded corners
*/
roundedCornersTransformation(top_left: number,
bottom_left: number, top_right: number, bottom_right: number) {
bottom_left: number, top_right: number, bottom_right: number) {
var imageKnifeOption = new RequestOption();
imageKnifeOption.load(mUrl)
@ -560,18 +620,23 @@ struct TransformPixelMapPage {
this.mRoundPixelMap = result;
setTimeout(() => {
let result2 = undefined;
result2 = data.drawPixelMap.imagePixelMap as PixelMap;
result2 = data.drawPixelMap.imagePixelMap as PixelMap;
this.mRoundPixelMap = result2;
}, 100)
return false;
})
.setImageViewSize({ width: vp2px(100), height: vp2px(100) })
.skipMemoryCache(true)
.roundedCorners({ top_left: top_left, top_right: top_right, bottom_left: bottom_left, bottom_right: bottom_right })
.roundedCorners({
top_left: top_left,
top_right: top_right,
bottom_left: bottom_left,
bottom_right: bottom_right
})
ImageKnife.call(imageKnifeOption);
}
/**
/**
* circle crop
*/
circleTransformation() {
@ -579,7 +644,7 @@ struct TransformPixelMapPage {
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mCirclePixelMap = result;
return false;
})
@ -589,7 +654,7 @@ struct TransformPixelMapPage {
ImageKnife.call(imageKnifeOption);
}
/**
/**
* circle crop with border
*/
circleBorderTransformation(border: number) {
@ -599,7 +664,7 @@ struct TransformPixelMapPage {
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mCircleBorderPixelMap = result;
return false;
})
@ -610,7 +675,7 @@ struct TransformPixelMapPage {
ImageKnife.call(imageKnifeOption);
}
/**
/**
* rotate
*/
transformRotate(angled: number) {
@ -619,7 +684,7 @@ struct TransformPixelMapPage {
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mRotatePixelMap = result;
return false;
})
@ -629,7 +694,7 @@ struct TransformPixelMapPage {
ImageKnife.call(imageKnifeOption);
}
/**
/**
* square crop
*/
transformSquare() {
@ -638,7 +703,7 @@ struct TransformPixelMapPage {
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mSquarePixelMap = result;
return false;
})
@ -648,7 +713,7 @@ struct TransformPixelMapPage {
ImageKnife.call(imageKnifeOption);
}
/**
/**
* region crop
*/
clipPixelMap(width: number, height: number, cropType: CropType) {
@ -658,13 +723,13 @@ struct TransformPixelMapPage {
.addListener((err, data) => {
let result = undefined;
if (cropType == CropType.TOP) {
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mClipTopPixelMap = result;
} else if (cropType == CropType.CENTER) {
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mClipCenterPixelMap = result;
} else if (cropType == CropType.BOTTOM) {
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mClipBottomPixelMap = result;
}
return false;
@ -676,7 +741,7 @@ struct TransformPixelMapPage {
}
/**
/**
* grayscale
*/
grayscalePixelMap() {
@ -685,18 +750,19 @@ struct TransformPixelMapPage {
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mGrayscalePixelMap = result;
return false;
})
.setImageViewSize({ width: vp2px(200), height: vp2px(200) })
.skipMemoryCache(true)
.openEfficient()
.grayscale()
ImageKnife.call(imageKnifeOption);
}
/**
/**
* brightness
*/
brightnessPixelMap(brightness: number) {
@ -705,18 +771,19 @@ struct TransformPixelMapPage {
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mBrightnessPixelMap = result;
return false;
})
.setImageViewSize({ width: vp2px(200), height: vp2px(200) })
.skipMemoryCache(true)
.openEfficient()
.brightnessFilter(brightness)
ImageKnife.call(imageKnifeOption);
}
/**
/**
* contrast
*/
contrastPixelMap(contrast: number) {
@ -725,18 +792,19 @@ struct TransformPixelMapPage {
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mContrastPixelMap = result;
return false;
})
.setImageViewSize({ width: vp2px(200), height: vp2px(200) })
.skipMemoryCache(true)
.openEfficient()
.contrastFilter(contrast)
ImageKnife.call(imageKnifeOption);
}
/**
/**
* invert
*/
invertPixelMap() {
@ -745,18 +813,19 @@ struct TransformPixelMapPage {
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mInvertPixelMap = result;
return false;
})
.setImageViewSize({ width: vp2px(200), height: vp2px(200) })
.skipMemoryCache(true)
.openEfficient()
.invertFilter()
ImageKnife.call(imageKnifeOption);
}
/**
/**
* sepia (aged-photo effect, dark brown tone)
*/
sepiaPixelMap() {
@ -765,18 +834,19 @@ struct TransformPixelMapPage {
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mSepiaPixelMap = result;
return false;
})
.setImageViewSize({ width: vp2px(200), height: vp2px(200) })
.skipMemoryCache(true)
.openEfficient()
.sepiaFilter()
ImageKnife.call(imageKnifeOption);
}
/**
/**
* sketch
*/
sketchPixelMap() {
@ -785,18 +855,19 @@ struct TransformPixelMapPage {
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mSketchPixelMap = result;
return false;
})
.setImageViewSize({ width: vp2px(200), height: vp2px(200) })
.skipMemoryCache(true)
.openEfficient()
.sketchFilter()
ImageKnife.call(imageKnifeOption);
}
/**
/**
* blur
*/
blurHandlePixelMap(radius: number) {
@ -805,17 +876,18 @@ struct TransformPixelMapPage {
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mBlurPixelMap = result;
return false;
})
.setImageViewSize({ width: vp2px(200), height: vp2px(200) })
.skipMemoryCache(true)
.openEfficient()
.blur(radius)
ImageKnife.call(imageKnifeOption);
}
/**
/**
* pixelation (mosaic)
*/
pixelHandlePixelMap(pixel: number) {
@ -824,18 +896,19 @@ struct TransformPixelMapPage {
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mPixelPixelMap = result;
return false;
})
.setImageViewSize({ width: vp2px(200), height: vp2px(200) })
.skipMemoryCache(true)
.openEfficient()
.pixelationFilter(pixel)
ImageKnife.call(imageKnifeOption);
}
/**
/**
* swirl
*/
swirlHandlePixelMap() {
@ -844,18 +917,19 @@ struct TransformPixelMapPage {
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mSwirlPixelMap = result;
return false;
})
.setImageViewSize({ width: vp2px(200), height: vp2px(200) })
.skipMemoryCache(true)
.openEfficient()
.swirlFilter(80)
// .diskCacheStrategy(new NONE())
ImageKnife.call(imageKnifeOption);
}
/**
/**
* mask
*/
maskHandlePixelMap(maskResource: Resource) {
@ -865,7 +939,7 @@ struct TransformPixelMapPage {
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mMaskPixelMap = result;
return false;
})
@ -876,6 +950,69 @@ struct TransformPixelMapPage {
ImageKnife.call(imageKnifeOption);
}
/**
* kuwahara
*/
kuwaharaHandlePixelMap() {
let imageKnifeOption = new RequestOption();
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mKuwaharaPixelMap = result;
return false;
})
.setImageViewSize({ width: vp2px(200), height: vp2px(200) })
.skipMemoryCache(true)
.openEfficient()
.kuwaharaFilter(20.0)
// .diskCacheStrategy(new NONE())
ImageKnife.call(imageKnifeOption);
}
/**
*toon
*/
toonHandlePixelMap() {
let imageKnifeOption = new RequestOption();
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mToonPixelMap = result;
return false;
})
.setImageViewSize({ width: vp2px(200), height: vp2px(200) })
.skipMemoryCache(true)
.openEfficient()
.toonFilter(0.2, 50.0);
// .diskCacheStrategy(new NONE())
ImageKnife.call(imageKnifeOption);
}
/**
*vignette
*/
vignetteHandlePixelMap() {
let imageKnifeOption = new RequestOption();
imageKnifeOption.load(mUrl)
.addListener((err, data) => {
let result = undefined;
result = data.drawPixelMap.imagePixelMap as PixelMap;
this.mVignettePixelMap = result;
return false;
})
.setImageViewSize({ width: vp2px(200), height: vp2px(200) })
.skipMemoryCache(true)
.openEfficient()
.vignetteFilter([0.5, 0.5], [0.0, 0.0, 0.0], [0.3, 0.5])
// .diskCacheStrategy(new NONE())
ImageKnife.call(imageKnifeOption);
}
}
var ImageKnife = globalThis.ImageKnife

4
gpu_transform/.gitignore vendored Normal file
View File

@ -0,0 +1,4 @@
/node_modules
/.preview
/build
/.cxx

View File

@ -0,0 +1,19 @@
{
"apiType": "stageMode",
"buildOption": {
"externalNativeOptions": {
"path": "./src/main/cpp/CMakeLists.txt",
"arguments": "",
"abiFilters": [
"armeabi-v7a",
"arm64-v8a"
],
"cppFlags": ""
},
},
"targets": [
{
"name": "default"
}
]
}

View File

@ -0,0 +1,2 @@
// Script for compiling build behavior. It is built in the build plug-in and cannot be modified currently.
export { harTasks } from '@ohos/hvigor-ohos-plugin';

33
gpu_transform/index.ets Normal file
View File

@ -0,0 +1,33 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export { GPUImage3x3TextureSamplingFilter } from './src/main/ets/gpu/filter/GPUImage3x3TextureSamplingFilter'
export { GPUImageBlurFilter } from './src/main/ets/gpu/filter/GPUImageBlurFilter'
export { GPUImageBrightnessFilter } from './src/main/ets/gpu/filter/GPUImageBrightnessFilter'
export { GPUImageColorInvertFilter } from './src/main/ets/gpu/filter/GPUImageColorInvertFilter'
export { GPUImageColorMatrixFilter } from './src/main/ets/gpu/filter/GPUImageColorMatrixFilter'
export { GPUImageContrastFilter } from './src/main/ets/gpu/filter/GPUImageContrastFilter'
export { GPUImageFilter } from './src/main/ets/gpu/filter/GPUImageFilter'
export { GPUImageFilterGroup } from './src/main/ets/gpu/filter/GPUImageFilterGroup'
export { GPUImageGrayscaleFilter } from './src/main/ets/gpu/filter/GPUImageGrayscaleFilter'
export { GPUImageKuwaharaFilter } from './src/main/ets/gpu/filter/GPUImageKuwaharaFilter'
export { GPUImagePixelationFilter } from './src/main/ets/gpu/filter/GPUImagePixelationFilter'
export { GPUImageSepiaToneFilter } from './src/main/ets/gpu/filter/GPUImageSepiaToneFilter'
export { GPUImageSketchFilter } from './src/main/ets/gpu/filter/GPUImageSketchFilter'
export { GPUImageSwirlFilter } from './src/main/ets/gpu/filter/GPUImageSwirlFilter'
export { GPUImageToonFilter } from './src/main/ets/gpu/filter/GPUImageToonFilter'
export { GPUImageVignetterFilter } from './src/main/ets/gpu/filter/GPUImageVignetterFilter'
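The exports above are resolved through the package name "@ohos/gpu_transform" declared in the module's package.json; a minimal, illustrative import in a consumer .ets file (class names taken from the export list above, further usage is not shown in this diff):
import { GPUImageGrayscaleFilter, GPUImageToonFilter, GPUImageVignetterFilter } from '@ohos/gpu_transform'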

View File

@ -0,0 +1,14 @@
{
"license": "ISC",
"types": "",
"devDependencies": {},
"name": "@ohos/gpu_transform",
"description": "a npm package which contains arkUI2.0 page",
"ohos": {
"org": ""
},
"main": "index.ets",
"type": "module",
"version": "1.0.0",
"dependencies": {}
}

View File

@ -0,0 +1,33 @@
# the minimum version of CMake.
cmake_minimum_required(VERSION 3.4.1)
project(gpu_transform)
set(NATIVERENDER_ROOT_PATH ${CMAKE_CURRENT_SOURCE_DIR})
include_directories(${NATIVERENDER_ROOT_PATH}
${NATIVERENDER_ROOT_PATH}/include
${NATIVERENDER_ROOT_PATH}/util
${NATIVERENDER_ROOT_PATH}/napi
${NATIVERENDER_ROOT_PATH}/common
${NATIVERENDER_ROOT_PATH}/render
${NATIVERENDER_ROOT_PATH}/constant
)
add_library(nativeGpu SHARED
napi/napi_init.cpp
render/EGLRender.cpp
util/GLUtils.cpp
util/NapiUtil.cpp
)
find_library (
hilog-lib
hilog_ndk.z )
find_library (
EGL-lib
EGL )
find_library (
GLES-lib
GLESv3 )
target_link_libraries(nativeGpu PUBLIC ${hilog-lib} ${EGL-lib} ${GLES-lib} libace_napi.z.so libc++.a)

View File

@ -0,0 +1,57 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
// Created on 2022/12/15.
//
// Node APIs are not fully supported. To resolve the "interface cannot be found" compilation error,
// please include "napi/native_api.h".
#ifndef GPU_ImageETS_native_common_H
#define GPU_ImageETS_native_common_H
#define NAPI_RETVAL_NOTHING
#define GET_AND_THROW_LAST_ERROR(env) \
do { \
const napi_extended_error_info* errorInfo = nullptr; \
napi_get_last_error_info((env),&errorInfo); \
bool isPending = false; \
napi_is_exception_pending((env),&isPending); \
if(!isPending && errorInfo != nullptr) { \
const char* errorMessage = \
errorInfo->error_message != nullptr ? errorInfo->error_message : "empty error message"; \
napi_throw_error((env),nullptr,errorMessage); \
} \
} while (0)
#define DECLARE_NAPI_FUNCTION(name, func) \
{ \
(name),nullptr,(func),nullptr,nullptr,nullptr,napi_default,nullptr \
}
#define NAPI_CALL_BASE(env, theCall, retVal) \
do { \
if((theCall) != napi_ok) { \
GET_AND_THROW_LAST_ERROR((env)); \
return retVal; \
} \
} while (0)
#define NAPI_CALL(env, theCall) NAPI_CALL_BASE(env, theCall, nullptr)
#endif //GPU_ImageETS_native_common_H

View File

@ -0,0 +1,448 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
// Created on 2022/12/16.
//
// Node APIs are not fully supported. To resolve the "interface cannot be found" compilation error,
// please include "napi/native_api.h".
#ifndef GPU_ImageETS_constant_shape_H
#define GPU_ImageETS_constant_shape_H
const char vShaderStr[] =
"#version 300 es \n"
"layout(location = 0) in vec4 a_position; \n"
"layout(location = 1) in vec2 a_texCoord; \n"
"out vec2 v_texCoord; \n"
"void main() \n"
"{ \n"
" gl_Position = a_position; \n"
" v_texCoord = a_texCoord; \n"
"} \n";
const char fShaderStr0[] =
"#version 300 es \n"
"precision mediump float; \n"
"in vec2 v_texCoord; \n"
"layout(location = 0) out vec4 outColor; \n"
"uniform sampler2D s_TextureMap; \n"
"void main() \n"
"{ \n"
"outColor = texture(s_TextureMap,v_texCoord); \n"
"} \n";
const char v3x3ShaderStr[] =
"#version 300 es\n"
"precision highp float;\n"
"layout(location = 0) in vec4 a_position;\n"
"layout(location = 1) in vec2 a_texCoord;\n"
"uniform highp float texelWidth;\n"
"uniform highp float texelHeight;\n"
"out vec2 v_texCoord;\n"
"out vec2 leftTextureCoordinate;\n"
"out vec2 rightTextureCoordinate;\n"
"out vec2 topTextureCoordinate;\n"
"out vec2 topLeftTextureCoordinate;\n"
"out vec2 topRightTextureCoordinate;\n"
"out vec2 bottomTextureCoordinate;\n"
"out vec2 bottomLeftTextureCoordinate;\n"
"out vec2 bottomRightTextureCoordinate;\n"
"void main()\n"
"{\n"
"gl_Position = a_position;\n"
"vec2 widthStep = vec2(texelWidth, 0.0);\n"
"vec2 heightStep = vec2(0.0, texelHeight);\n"
"vec2 widthHeightStep = vec2(texelWidth, texelHeight);\n"
"vec2 widthNegativeHeightStep = vec2(texelWidth, -texelHeight);\n"
"v_texCoord = a_texCoord;\n"
"leftTextureCoordinate = a_texCoord - widthStep;\n"
"rightTextureCoordinate = a_texCoord + widthStep;\n"
"\n"
"topTextureCoordinate = a_texCoord - heightStep;\n"
"topLeftTextureCoordinate = a_texCoord - widthHeightStep;\n"
"topRightTextureCoordinate = a_texCoord + widthNegativeHeightStep;\n"
"\n"
"bottomTextureCoordinate = a_texCoord +heightStep;\n"
"bottomLeftTextureCoordinate = a_texCoord - widthNegativeHeightStep;\n"
"bottomRightTextureCoordinate = a_texCoord + widthHeightStep;\n"
"}";
//kuwahara
const char fShaderStr3[] =
"#version 300 es\n"
"precision highp float;\n"
"layout(location = 0) out vec4 outColor;\n"
"in vec2 v_texCoord;\n"
"uniform lowp sampler2D s_TextureMap;\n"
"uniform vec2 u_texSize;\n"
"uniform highp float radius;\n"
"const vec2 src_size = vec2(1.0 / 768.0, 1.0 / 1024.0);\n"
"void main() {\n"
"vec2 uv = v_texCoord;\n"
"float n = float((radius + 1.0) * (radius + 1.0));\n"
"int i ; int j ;\n"
"vec3 m0 = vec3(0.0); vec3 m1 = vec3(0.0); vec3 m2 = vec3(0.0); vec3 m3 = vec3(0.0);\n"
"vec3 s0 = vec3(0.0); vec3 s1 = vec3(0.0); vec3 s2 = vec3(0.0); vec3 s3 = vec3(0.0);\n"
"vec3 c;\n"
"for (j = -int(radius); j <=0; ++j) {\n"
"for (i = -int(radius); i <=0; ++i) {\n"
"c = texture(s_TextureMap,uv + vec2(i,j) * src_size).rgb;\n"
"m0 += c;\n"
"s0 += c * c;\n"
"}\n"
"}\n"
"for (j = -int(radius); j <=0; ++j) {\n"
"for (i =0; i <=int(radius); ++i) {\n"
"c = texture(s_TextureMap,uv + vec2(i,j) * src_size).rgb;\n"
"m1 += c;\n"
"s1 += c * c;\n"
"}\n"
"}\n"
"for (j = 0; j <=int(radius); ++j) {\n"
"for (i = 0; i <= int(radius); ++i) {\n"
"c = texture(s_TextureMap,uv + vec2(i,j) * src_size).rgb;\n"
"m2 += c;\n"
"s2 += c * c;\n"
"}\n"
"}\n"
"for (j = 0; j <=int(radius); ++j) {\n"
"for (i = -int(radius); i <= 0; ++i) {\n"
"c = texture(s_TextureMap,uv + vec2(i,j) * src_size).rgb;\n"
"m3 += c;\n"
"s3 += c * c;\n"
"}\n"
"}\n"
"\n"
"\n"
"float min_sigma2 = 1e+2;\n"
"m0 /= n;\n"
"s0 = abs(s0 /n - m0 * m0);\n"
"\n"
"float sigma2 = s0.r + s0.g + s0.b;\n"
"if (sigma2 < min_sigma2) {\n"
"min_sigma2 = sigma2;\n"
"outColor = vec4(m0,1.0);\n"
"}\n"
"\n"
"m1 /= n;\n"
"s1 = abs(s1 / n -m1 * m1);\n"
"\n"
"sigma2 = s1.r + s1.g + s1.b;\n"
"if (sigma2 < min_sigma2) {\n"
"min_sigma2 = sigma2;\n"
"outColor = vec4(m1,1.0);\n"
"}\n"
"\n"
"m2 /= n;\n"
"s2 = abs(s2 / n -m2 * m2);\n"
"\n"
"sigma2 = s2.r + s2.g + s2.b;\n"
"if (sigma2 < min_sigma2) {\n"
"min_sigma2 = sigma2;\n"
"outColor = vec4(m2,1.0);\n"
"}\n"
"\n"
"m3 /= n;\n"
"s3 = abs(s3 / n -m3 * m3);\n"
"\n"
"sigma2 = s3.r + s3.g + s3.b;\n"
"if (sigma2 < min_sigma2) {\n"
"min_sigma2 = sigma2;\n"
"outColor = vec4(m3,1.0);\n"
"}\n"
"}\n";
// swirl
const char swirlFShaderStr[] =
"#version 300 es\n"
"precision highp float;\n"
"layout(location = 0) out vec4 outColor;\n"
"in vec2 v_texCoord;\n"
"uniform lowp sampler2D s_TextureMap;\n"
"uniform vec2 u_texSize;\n"
"uniform highp float radius;\n"
"uniform highp float angle;\n"
"uniform vec2 center;\n"
"void main() {\n"
"vec2 tc = v_texCoord * u_texSize;\n"
"tc -= center;\n"
"float dist = length(tc);\n"
"if (dist < radius) {\n"
"float percent = (radius - dist) / radius;\n"
"float theta = percent * percent * angle * 8.0;\n"
"float s = sin(theta);\n"
"float c = cos(theta);\n"
"tc = vec2(dot(tc, vec2(c, -s)), dot(tc, vec2(s, c)));\n"
"}\n"
"tc += center;\n"
"outColor = texture(s_TextureMap, tc / u_texSize);\n"
"}";
// brightness
const char brightFShaderStr[] =
"#version 300 es\n"
"precision highp float;\n"
"in vec2 v_texCoord;\n"
"\n"
"uniform lowp sampler2D s_TextureMap;\n"
"uniform lowp float brightness;\n"
"layout(location = 0) out vec4 outColor;\n"
"\n"
"void main()\n"
"{\n"
"lowp vec4 textureColor = texture(s_TextureMap, v_texCoord);\n"
"\n"
"outColor = vec4((textureColor.rgb + vec3(brightness)), textureColor.w);\n"
"}";
// contrast
const char contrastFShaderStr[] =
"#version 300 es\n"
"precision highp float;\n"
"in vec2 v_texCoord;\n"
"\n"
"uniform lowp sampler2D s_TextureMap;\n"
"uniform lowp float contrast;\n"
"layout(location = 0) out vec4 outColor;\n"
"\n"
"void main()\n"
"{\n"
"lowp vec4 textureColor = texture(s_TextureMap, v_texCoord);\n"
"\n"
"outColor = vec4(((textureColor.rgb - vec3(0.5)) * contrast + vec3(0.5)), textureColor.w);\n"
"}";
//invert
const char invertFShaderStr[] =
"#version 300 es\n"
"precision highp float;\n"
"int vec2 v_texCoord;\n"
"\n"
"uniform lowp sampler2D s_TextureMap;\n"
"layout(location = 0) out vec4 outColor;\n"
"\n"
"void main()\n"
"{\n"
"lowp vec4 textureColor = texture(s_TextureMap, v_texCoord);\n"
"\n"
"outColor = vec4((1.0 - textureColor.rgb), textureColor.w);\n"
"}";
//pixel
const char pixelFShaderStr[] =
"#version 300 es\n"
"precision highp float;\n"
"in vec2 v_texCoord;\n"
"uniform float imageWidthFactor;\n"
"uniform float imageHeightFactor;\n"
"uniform lowp sampler2D s_TextureMap;\n"
"uniform float pixel;\n"
"layout(location = 0) out vec4 outColor;\n"
"void main()\n"
"{\n"
"vec2 uv = v_texCoord.xy;\n"
"float dx = pixel * imageWidthFactor;\n"
"float dy = pixel * imageHeightFactor;\n"
"vec2 coord = vec2(dx * floor(uv.x / dx), dy * floor(uv.y / dy));\n"
"vec3 tc = texture(s_TextureMap, coord).xyz;\n"
"outColor = vec4(tc, 1.0);\n"
"}";
//vignette
const char vignetteFShaderStr[] =
"#version 300 es\n"
"precision highp float;\n"
"uniform lowp sampler2D s_TextureMap;\n"
"in vec2 v_texCoord;\n"
"layout(location = 0) out vec4 outColor;\n"
"\n"
"uniform lowp vec2 vignetteCenter;\n"
"uniform lowp vec3 vignetteColor;\n"
"uniform highp float vignetteStart;\n"
"uniform highp float vignetteEnd;\n"
"\n"
"void main()\n"
"{\n"
"lowp vec3 rgb = texture(s_TextureMap, v_texCoord).rgb;\n"
"lowp float d = distance(v_texCoord, vec2(0.5,0.5));\n"
"rgb *= (1.0 - smoothstep(vignetteStart , vignetteEnd, d));\n"
"outColor = vec4(vec3(rgb), 1.0);\n"
"\n"
"lowp vec3 rgb2 = texture(s_TextureMap, v_texCoord).rgb;\n"
"lowp float d2 = distance(v_texCoord, vec2(vignetteCenter.x, vignetteCenter.y));\n"
"lowp float percent = smoothstep(vignetteStart, vignetteEnd, d2);\n"
"outColor = vec4(mix(rgb2.x,vignetteColor.x,percent), mix(rgb2.y, vignetteColor.y, percent), mix(rgb2.z, vignetteColor.z, percent), 1.0);\n"
"}";
//ColorMatrix
const char colorMatrixFShaderStr[] =
"#version 300 es\n"
"precision highp float;\n"
"in vec2 v_texCoord;\n"
"uniform lowp sampler2D s_TextureMap;\n"
"layout(location = 0) out vec4 outColor;\n"
"\n"
"uniform lowp mat4 colorMatrix;\n"
"uniform lowp float intensity;\n"
"void main()\n"
"{\n"
"lowp vec4 textureColor = texture(s_TextureMap, v_texCoord);\n"
"lowp vec4 outputColor = textureColor * colorMatrix;\n"
"outColor = (intensity * outputColor) + ((1.0 - intensity) * textureColor);\n"
"}";
//toon
const char toonFShaderStr[] =
"#version 300 es\n"
"precision highp float;\n"
"in vec2 v_texCoord;\n"
"layout(location = 0) out vec4 outColor;\n"
"in vec2 leftTextureCoordinate;\n"
"in vec2 rightTextureCoordinate;\n"
"in vec2 topTextureCoordinate;\n"
"in vec2 topLeftTextureCoordinate;\n"
"in vec2 topRightTextureCoordinate;\n"
"in vec2 bottomTextureCoordinate;\n"
"in vec2 bottomLeftTextureCoordinate;\n"
"in vec2 bottomRightTextureCoordinate;\n"
"uniform lowp sampler2D s_TextureMap;\n"
"uniform highp float intensity;\n"
"uniform highp float threshold;\n"
"uniform highp float quantizationLevels;\n"
"const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);\n"
"void main()\n"
"{\n"
"vec4 textureColor = texture(s_TextureMap, v_texCoord);\n"
"float bottomLeftIntensity = texture(s_TextureMap, bottomLeftTextureCoordinate).r;\n"
"float topRightIntensity = texture(s_TextureMap, topRightTextureCoordinate).r;\n"
"float topLeftIntensity = texture(s_TextureMap, topLeftTextureCoordinate).r;\n"
"float bottomRightIntensity = texture(s_TextureMap, bottomRightTextureCoordinate).r;\n"
"float leftIntensity = texture(s_TextureMap, leftTextureCoordinate).r;\n"
"float rightIntensity = texture(s_TextureMap, rightTextureCoordinate).r;\n"
"float bottomIntensity = texture(s_TextureMap, bottomTextureCoordinate).r;\n"
"float topIntensity = texture(s_TextureMap, topTextureCoordinate).r;\n"
"float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity +2.0 * bottomIntensity + bottomRightIntensity;\n"
"float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity +2.0 * rightIntensity + topRightIntensity;\n"
"float mag = length(vec2(h, v));\n"
"\n"
"vec3 posterizedImageColor = floor((textureColor.rgb * quantizationLevels) + 0.5) / quantizationLevels;\n"
"\n"
"float thresholdTest = 1.0 - step(threshold, mag);\n"
"\n"
"outColor = vec4(posterizedImageColor * thresholdTest, textureColor.a);\n"
"}";
//grayScale
const char grayScaleShaderStr[] =
"#version 300 es\n"
"precision highp float;\n"
"in vec2 v_texCoord;\n"
"uniform lowp sampler2D s_TextureMap;\n"
"layout(location = 0) out vec4 outColor;\n"
"const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);\n"
"void main()\n"
"{\n"
"lowp vec4 textureColor = texture(s_TextureMap, v_texCoord);\n"
"float luminance = dot(textureColor.rgb, W);\n"
"outColor = vec4(vec3(luminance), textureColor.a);\n"
"}";
//sketch
const char sketchShaderStr[] =
"#version 300 es\n"
"precision highp float;\n"
"layout(location = 0) out vec4 outColor;\n"
"uniform lowp sampler2D s_TextureMap;\n"
"in vec2 textureCoordinate;\n"
"in vec2 leftTextureCoordinate;\n"
"in vec2 rightTextureCoordinate;\n"
"\n"
"in vec2 topTextureCoordinate;\n"
"in vec2 topLeftTextureCoordinate;\n"
"in vec2 topRightTextureCoordinate;\n"
"\n"
"in vec2 bottomTextureCoordinate;\n"
"in vec2 bottomLeftTextureCoordinate;\n"
"in vec2 bottomRightTextureCoordinate;\n"
"\n"
"void main()\n"
"{\n"
"float bottomLeftIntensity = texture(s_TextureMap, bottomLeftTextureCoordinate).r;\n"
"float topRightIntensity = texture(s_TextureMap, topRightTextureCoordinate).r;\n"
"float topLeftIntensity = texture(s_TextureMap, topLeftTextureCoordinate).r;\n"
"float bottomRightIntensity = texture(s_TextureMap, bottomRightTextureCoordinate).r;\n"
"float leftIntensity = texture(s_TextureMap, leftTextureCoordinate).r;\n"
"float rightIntensity = texture(s_TextureMap, rightTextureCoordinate).r;\n"
"float bottomIntensity = texture(s_TextureMap, bottomTextureCoordinate).r;\n"
"float topIntensity = texture(s_TextureMap, topTextureCoordinate).r;\n"
"float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity +2.0 * bottomIntensity + bottomRightIntensity;\n"
"float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity +2.0 * rightIntensity + topRightIntensity;\n"
"float mag = 1.0-length(vec2(h, v));\n"
"outColor = vec4(vec3(mag), 1.0);\n"
"}";
//blur
const char blurShaderStr[] =
"#version 300 es\n"
"precision highp float;\n"
"uniform lowp sampler2D s_TextureMap;\n"
"in vec2 v_texCoord;\n"
"layout(location = 0) out vec4 outColor;\n"
"uniform highp int blurRadius;\n"
"uniform highp vec2 blurOffset;\n"
"\n"
"uniform highp float sumWeight;\n"
"float PI = 3.1415926;\n"
"float getWeight(int i)\n"
"{\n"
"float sigma = float(blurRadius) / 3.0 ;\n"
"return (1.0 / sqrt(2.0 * PI * sigma * sigma)) * exp(-float(i * i) / (2.0 * sigma * sigma)) / sumWeight;\n"
"}\n"
"vec2 clampCoordinate (vec2 coordinate)\n"
"{\n"
"return vec2(clamp(coordinate.x, 0.0, 1.0), clamp(coordinate.y, 0.0, 1.0));\n"
"}\n"
"\n"
"void main()\n"
"{\n"
"vec4 sourceColor = texture(s_TextureMap, v_texCoord);\n"
"if (blurRadius <= 1)\n"
"{\n"
"outColor = sourceColor;\n"
"return;\n"
"}\n"
"float weight = getWeight(0);\n"
"vec3 finalColor = sourceColor.rgb * weight;\n"
"for(int i = 1; i < blurRadius; i++) {\n"
"weight = getWeight(i);\n"
"finalColor += texture(s_TextureMap, clampCoordinate(v_texCoord - blurOffset * float(i))).rgb * weight;\n"
"finalColor += texture(s_TextureMap, clampCoordinate(v_texCoord + blurOffset * float(i))).rgb * weight;\n"
"}\n"
"outColor = vec4(finalColor, sourceColor.a);\n"
"}\n";
#endif //GPU_ImageETS_constant_shape_H

View File

@ -0,0 +1,99 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
// Created on 2022/12/20.
//
// Node APIs are not fully supported. To resolve the "interface cannot be found" compilation error,
// please include "napi/native_api.h".
#include <js_native_api.h>
#include <js_native_api_types.h>
#include <node_api.h>
#include "DebugLog.h"
#include "native_common.h"
#include "EGLRender.h"
static napi_value Add(napi_env env, napi_callback_info info)
{
size_t requireArgc = 2;
size_t argc = 2;
napi_value args[2] = {nullptr};
napi_get_cb_info(env, info, &argc, args , nullptr, nullptr);
napi_valuetype valuetype0;
napi_typeof(env, args[0], &valuetype0);
napi_valuetype valuetype1;
napi_typeof(env, args[1], &valuetype1);
double value0;
napi_get_value_double(env, args[0], &value0);
double value1;
napi_get_value_double(env, args[1], &value1);
napi_value sum;
napi_create_double(env, value0 + value1, &sum);
return sum;
}
static napi_value Init(napi_env env, napi_value exports) {
napi_property_descriptor desc[] = {
DECLARE_NAPI_FUNCTION("EglRenderInit",EGLRender:: RenderInit),
DECLARE_NAPI_FUNCTION("EglRenderSetImageData",EGLRender:: RenderSetData),
DECLARE_NAPI_FUNCTION("EglPixelMapSurface",EGLRender:: GetPixelMapOfSurface),
DECLARE_NAPI_FUNCTION("EglRenderSetIntParams",EGLRender:: RenderSetIntParams),
DECLARE_NAPI_FUNCTION("EglIsInit",EGLRender:: EGLIsInit),
DECLARE_NAPI_FUNCTION("EglDestroy",EGLRender:: DestroyGlesEnv),
DECLARE_NAPI_FUNCTION("EglUseProgram",EGLRender:: StartUseProgram),
DECLARE_NAPI_FUNCTION("EglRendering",EGLRender:: Rendering),
DECLARE_NAPI_FUNCTION("EglUniform1i",EGLRender:: RenderGlUniform1i),
DECLARE_NAPI_FUNCTION("EglUniform1f",EGLRender:: RenderGlUniform1f),
DECLARE_NAPI_FUNCTION("EglUniform2fv",EGLRender:: RenderGlUniform2fv),
DECLARE_NAPI_FUNCTION("EglSetTypeArrayOfFloat",EGLRender:: setTypeArrayOfFloat),
DECLARE_NAPI_FUNCTION("EglSetTypeArrayOfMatrix3f",EGLRender:: setTypeArrayOfMatrix3f),
DECLARE_NAPI_FUNCTION("EglSetTypeArrayOfMatrix4f",EGLRender:: setTypeArrayOfMatrix4f),
};
NAPI_CALL(env, napi_define_properties(env, exports, sizeof(desc) / sizeof(desc[0]),desc));
return exports;
}
/**
* Napi Module define
*/
static napi_module nativeGpuModule = {
.nm_version =1,
.nm_flags = 0,
.nm_filename = nullptr,
.nm_register_func = Init,
.nm_modname = "nativeGpu",
.nm_priv = ((void*)0),
.reserved = { 0 },
};
extern "C" __attribute__((constructor)) void RegisterModule(void)
{
napi_module_register(&nativeGpuModule);
}
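A minimal, hypothetical ArkTS sketch of driving the module registered above (the 'libnativeGpu.so' import path follows the usual OpenHarmony convention for a native library named "nativeGpu"; buffer contents and parameter values are illustrative only):
import nativeGpu from 'libnativeGpu.so'
const width = 256
const height = 256
// RGBA pixels would normally come from a decoded PixelMap; an empty buffer is a placeholder here.
let pixels: ArrayBuffer = new ArrayBuffer(width * height * 4)
if (!nativeGpu.EglIsInit()) {
  nativeGpu.EglRenderInit()                              // create the off-screen EGL/GLES 3 environment
}
nativeGpu.EglRenderSetImageData(pixels, width, height)   // upload the source image
nativeGpu.EglRenderSetIntParams(300, 10)                 // 300 = PARAM_TYPE_SHADER_INDEX, 10 selects grayScaleShaderStr
nativeGpu.EglUseProgram()
nativeGpu.EglRendering()
let out: ArrayBuffer = nativeGpu.EglPixelMapSurface(0, 0, width, height)  // read back the rendered RGBA bytes
nativeGpu.EglDestroy()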

View File

@ -0,0 +1,822 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
// Created on 2022/12/20.
//
// Node APIs are not fully supported. To resolve the "interface cannot be found" compilation error,
// please include "napi/native_api.h".
#include "EGLRender.h"
#include "native_common.h"
#include <js_native_api.h>
#include <napi/native_api.h>
#include <stdlib.h>
#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GLES3/gl3.h>
#include <stdint.h>
#include <js_native_api_types.h>
#include "DebugLog.h"
#include "constant/constant_shape.h"
#include "GLUtils.h"
#include "../util/NapiUtil.h"
#define PARAM_TYPE_SHADER_INDEX 300
#define VERTEX_POS_LOC 0
#define TEXTURE_POS_LOC 1
const int32_t STR_DEFAULT_SIZE = 1024;
EGLRender *EGLRender::sInstance = nullptr;
// vertex coordinates
const GLfloat vVertices[] = {
-1.0f, -1.0f, 0.0f, //bottom left
1.0f, -1.0f, 0.0f, //bottom right
-1.0f, 1.0f, 0.0f, //top left
1.0f, 1.0f, 0.0f, //top right
};
// standard texture coordinates
const GLfloat vTexCoors[] = {
0.0f, 1.0f, //bottom left
1.0f, 1.0f, //bottom right
0.0f, 0.0f, //top left
1.0f, 0.0f, //top right
};
// FBO texture coordinates differ from the standard orientation (mirrored vertically)
const GLfloat vFboTexCoors[] = {
0.0f, 0.0f, //bottom left
1.0f, 0.0f, //bottom right
0.0f, 1.0f, //top left
1.0f, 1.0f, //top right
};
const GLushort indices[] = { 0, 1, 2, 1, 3, 2 };
napi_value EGLRender::RenderInit(napi_env env, napi_callback_info info) {
napi_value exports;
NAPI_CALL(env, napi_create_object(env, &exports));
napi_property_descriptor desc[] = {
};
NAPI_CALL(env, napi_define_properties(env, exports, sizeof(desc) / sizeof(desc[0]),desc));
EGLRender::GetInstance() ->Init();
return exports;
}
napi_value EGLRender::RenderSetData(napi_env env, napi_callback_info info) {
size_t argc = 3;
napi_value args[3] = { nullptr };
napi_get_cb_info(env, info, &argc, args, nullptr, nullptr);
void* buffer;
size_t bufferLength;
napi_status buffStatus = napi_get_arraybuffer_info(env, args[0], &buffer, &bufferLength);
if (buffStatus != napi_ok) {
return nullptr;
}
uint8_t* uint8_buf = reinterpret_cast<uint8_t *>(buffer);
uint32_t width;
napi_status wStatus = napi_get_value_uint32(env, args[1], &width);
if (wStatus != napi_ok) {
return nullptr;
}
uint32_t height;
napi_status hStatus = napi_get_value_uint32(env, args[2], &height);
if (hStatus != napi_ok) {
return nullptr;
}
EGLRender::GetInstance() -> SetImageData(uint8_buf, width, height);
return nullptr;
}
napi_value EGLRender::RenderSetIntParams(napi_env env, napi_callback_info info) {
LOGI("gl--> RenderSetIntParams start");
size_t argc = 2;
napi_value args[2] = { nullptr };
napi_get_cb_info(env, info, &argc, args, nullptr, nullptr);
uint32_t type;
napi_status tStatus = napi_get_value_uint32(env, args[0], &type);
if (tStatus != napi_ok) {
return nullptr;
}
uint32_t param;
napi_status pStatus = napi_get_value_uint32(env, args[1], &param);
if (pStatus != napi_ok) {
return nullptr;
}
EGLRender::GetInstance() -> SetIntParams(type, param);
return nullptr;
}
napi_value EGLRender::GetPixelMapOfSurface(napi_env env, napi_callback_info info) {
size_t argc = 4;
napi_value args[4] = { nullptr };
napi_get_cb_info(env, info, &argc, args, nullptr, nullptr);
uint32_t x;
napi_status xStatus = napi_get_value_uint32(env, args[0], &x);
if (xStatus != napi_ok) {
return nullptr;
}
uint32_t y;
napi_status yStatus = napi_get_value_uint32(env, args[1], &y);
if (yStatus != napi_ok) {
return nullptr;
}
uint32_t surfaceWidth;
napi_status swStatus = napi_get_value_uint32(env, args[2], &surfaceWidth);
if (swStatus != napi_ok) {
return nullptr;
}
uint32_t surfaceHeight;
napi_status shStatus = napi_get_value_uint32(env, args[3], &surfaceHeight);
if (shStatus != napi_ok) {
return nullptr;
}
uint8_t* pixels = (uint8_t*) malloc(surfaceWidth * surfaceHeight * 4);
glPixelStorei(GL_PACK_ALIGNMENT, 1);
glReadPixels(x, y, surfaceWidth, surfaceHeight, GL_RGBA, GL_UNSIGNED_BYTE, pixels);
NativeImageUtil::flip(&pixels, surfaceWidth, surfaceHeight);
napi_value array;
int byte_length = surfaceWidth * surfaceHeight * 4;
if (!NativeImageUtil::CreateArrayBuffer(env, pixels, byte_length, &array)) {
LOGI("gl--> GetPixelMapOfSurface error");
}
free(pixels);
return array;
}
napi_value EGLRender::EGLIsInit(napi_env env, napi_callback_info info) {
napi_value isInit;
int32_t value;
if (EGLRender::GetInstance() -> m_IsGLContextReady) {
value = 1;
}else {
value = 0;
}
napi_status status = napi_create_int32(env, value, &isInit);
if (status != napi_ok) {
return nullptr;
}
return isInit;
}
napi_value EGLRender::DestroyGlesEnv(napi_env env, napi_callback_info info) {
EGLRender::GetInstance() -> UnInit();
return nullptr;
}
napi_value EGLRender::StartUseProgram(napi_env env, napi_callback_info info) {
EGLRender::GetInstance() -> UseProgram();
return nullptr;
}
napi_value EGLRender::Rendering(napi_env env, napi_callback_info info) {
// 7. render
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, (const void *)0);
glBindVertexArray(GL_NONE);
glBindTexture(GL_TEXTURE_2D, GL_NONE);
return nullptr;
}
napi_value EGLRender::setTypeArrayOfFloat(napi_env env, napi_callback_info info) {
size_t argc = 3;
napi_value args[3] = { nullptr };
napi_get_cb_info(env, info, &argc, args, nullptr, nullptr);
std::string locationContent;
NapiUtil::JsValueToString(env, args[0], 1024, locationContent);
char* location = (char*)locationContent.c_str();
std::string content;
NapiUtil::JsValueToString(env, args[1], 1024, content);
char* key = (char*)content.c_str();
napi_typedarray_type dataType = napi_float32_array;
void* buffer;
size_t bufferLength;
size_t byte_offset;
napi_status buffStatus = napi_get_typedarray_info(env, args[2], &dataType, &bufferLength, &buffer, &args[2], &byte_offset);
if (buffStatus != napi_ok){
return nullptr;
}
float* value = reinterpret_cast<float *>(buffer);
int uniformType;
if (strcmp(key, "glUniform2fv") == 0) {
uniformType = 2;
}else if (strcmp(key, "glUniform3fv") == 0) {
uniformType = 3;
}else if (strcmp(key, "glUniform4fv") == 0) {
uniformType = 4;
}else if (strcmp(key, "glUniform1fv") == 0) {
uniformType = 1;
}else if (strcmp(key, "glUniform2f") == 0) {
uniformType = 21;
}
EGLRender::GetInstance() -> GlUniformArray(location, value, uniformType);
return nullptr;
}
napi_value EGLRender::setTypeArrayOfMatrix3f(napi_env env, napi_callback_info info) {
size_t argc = 2;
napi_value args[2] = { nullptr };
napi_get_cb_info(env, info, &argc, args, nullptr, nullptr);
std::string locationContent;
NapiUtil::JsValueToString(env, args[0], 1024, locationContent);
char* location = (char*)locationContent.c_str();
napi_typedarray_type dataType = napi_float32_array;
void* buffer;
size_t bufferLength;
size_t byte_offset;
napi_status buffStatus = napi_get_typedarray_info(env, args[1], &dataType, &bufferLength, &buffer, &args[1], &byte_offset);
if (buffStatus != napi_ok){
return nullptr;
}
float* value = reinterpret_cast<float *>(buffer);
EGLRender::GetInstance() -> GlUniformMatrix(location, value, 3);
return nullptr;
}
napi_value EGLRender::setTypeArrayOfMatrix4f(napi_env env, napi_callback_info info) {
size_t argc = 2;
napi_value args[2] = { nullptr };
napi_get_cb_info(env, info, &argc, args, nullptr, nullptr);
std::string locationContent;
NapiUtil::JsValueToString(env, args[0], 1024, locationContent);
char* location = (char*)locationContent.c_str();
napi_typedarray_type dataType = napi_float32_array;
void* buffer;
size_t bufferLength;
size_t byte_offset;
napi_status buffStatus = napi_get_typedarray_info(env, args[1], &dataType, &bufferLength, &buffer, &args[1], &byte_offset);
if (buffStatus != napi_ok){
return nullptr;
}
float* value = reinterpret_cast<float *>(buffer);
EGLRender::GetInstance() -> GlUniformMatrix(location, value, 4);
return nullptr;
}
napi_value EGLRender::RenderGlUniform1i(napi_env env, napi_callback_info info) {
//int
size_t argc = 2;
napi_value args[2] = { nullptr };
napi_get_cb_info(env, info, &argc, args, nullptr, nullptr);
std::string content;
NapiUtil::JsValueToString(env, args[0], 1024, content);
uint32_t value;
napi_status status = napi_get_value_uint32(env, args[1], &value);
if (status != napi_ok) {
return nullptr;
}
EGLRender::GetInstance() -> GlUniform((char*)content.c_str(), value, 0);
return nullptr;
}
napi_value EGLRender::RenderGlUniform1f(napi_env env, napi_callback_info info) {
//float
size_t argc = 2;
napi_value args[2] = { nullptr };
napi_get_cb_info(env, info, &argc, args, nullptr, nullptr);
std::string content;
NapiUtil::JsValueToString(env, args[0], 1024, content);
double value;
napi_status status = napi_get_value_double(env, args[1], &value);
if (status != napi_ok) {
return nullptr;
}
EGLRender::GetInstance() -> GlUniform((char*)content.c_str(), value, 1);
return nullptr;
}
napi_value EGLRender::RenderGlUniform2fv(napi_env env, napi_callback_info info) {
// float array
size_t argc = 3;
napi_value args[3] = { nullptr };
napi_get_cb_info(env, info, &argc, args, nullptr, nullptr);
std::string content;
NapiUtil::JsValueToString(env, args[0], 1024, content);
double value;
napi_status status = napi_get_value_double(env, args[1], &value);
if (status != napi_ok) {
return nullptr;
}
double value2;
napi_status status2 = napi_get_value_double(env, args[2], &value2);
if (status2 != napi_ok) {
return nullptr;
}
float vce2[2];
vce2[0] = value;
vce2[1] = value2;
EGLRender::GetInstance() -> GlUniformArray((char*)content.c_str(), vce2, 10);
return nullptr;
}
EGLRender::EGLRender() {
m_ImageTextureId = GL_NONE;
m_FboTextureId = GL_NONE;
m_SamplerLoc = GL_NONE;
m_TexSizeLoc = GL_NONE;
m_FboId = GL_NONE;
m_ProgramObj = GL_NONE;
m_VertexShader = GL_NONE;
m_FragmentShader = GL_NONE;
m_eglDisplay = nullptr;
m_IsGLContextReady = false;
m_ShaderIndex = 0;
}
EGLRender::~EGLRender() {
}
void EGLRender::Init() {
if (CreateGlEnv() == 0)
{
m_IsGLContextReady = true;
}
if (!m_IsGLContextReady) {
return;
}
glGenTextures(1, &m_ImageTextureId); // generate the texture name
glBindTexture(GL_TEXTURE_2D, m_ImageTextureId); // bind the named texture to the GL_TEXTURE_2D target
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glBindTexture(GL_TEXTURE_2D, GL_NONE);
glGenTextures(1, &m_FboTextureId);
glBindTexture(GL_TEXTURE_2D, m_FboTextureId);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glBindTexture(GL_TEXTURE_2D, GL_NONE);
m_ProgramObj = GLUtils::CreateProgram(vShaderStr, fShaderStr0, m_VertexShader,
m_FragmentShader);
if (!m_ProgramObj) {
GLUtils::CheckGLError("Create Program");
return;
}
// Generate VBO Ids and load the VBOs width data
glGenBuffers(3, m_VboIds);
glBindBuffer(GL_ARRAY_BUFFER, m_VboIds[0]);
glBufferData(GL_ARRAY_BUFFER, sizeof(vVertices), vVertices, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, m_VboIds[1]);
glBufferData(GL_ARRAY_BUFFER, sizeof(vFboTexCoors), vTexCoors, GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_VboIds[2]);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices, GL_STATIC_DRAW);
// Generate VAO Ids
glGenVertexArrays(1, m_VaoIds);
// FBO off screen rendering VAO
glBindVertexArray(m_VaoIds[0]);
glBindBuffer(GL_ARRAY_BUFFER, m_VboIds[0]);
glEnableVertexAttribArray(VERTEX_POS_LOC);
glVertexAttribPointer(VERTEX_POS_LOC, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(GLfloat), (const void *)0);
glBindBuffer(GL_ARRAY_BUFFER,GL_NONE);
glBindBuffer(GL_ARRAY_BUFFER, m_VboIds[1]);
glEnableVertexAttribArray(TEXTURE_POS_LOC);
glVertexAttribPointer(TEXTURE_POS_LOC, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), (const void *)0);
glBindBuffer(GL_ARRAY_BUFFER,GL_NONE);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_VboIds[2]);
glBindVertexArray(GL_NONE);
}
int EGLRender::CreateGlEnv() {
const EGLint confAttr[] =
{
EGL_RENDERABLE_TYPE, EGL_OPENGL_ES3_BIT_KHR,
EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
EGL_RED_SIZE, 8,
EGL_GREEN_SIZE, 8,
EGL_BLUE_SIZE, 8,
EGL_ALPHA_SIZE, 8,
EGL_DEPTH_SIZE, 16,
EGL_STENCIL_SIZE, 8,
EGL_NONE
};
// EGL context attributes
const EGLint ctxAttr[] = {
EGL_CONTEXT_CLIENT_VERSION, 2,
EGL_NONE
};
// surface attributes
// the surface size is set to the input frame size
const EGLint surfaceAttr[] = {
EGL_WIDTH, 1,
EGL_HEIGHT, 1,
EGL_NONE
};
EGLint eglMajVers, eglMinVers;
EGLint numConfigs;
int resultCode = 0;
do
{
m_eglDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
if (m_eglDisplay == EGL_NO_DISPLAY) {
resultCode = -1;
break;
}
// initialize the EGL display
if (!eglInitialize(m_eglDisplay, &eglMajVers, &eglMinVers)) {
resultCode = -1;
break;
}
// obtain an EGLConfig that describes the rendering-surface configuration
if (!eglChooseConfig(m_eglDisplay, confAttr, &m_eglConf, 1, &numConfigs)) {
resultCode = -1;
break;
}
// create the rendering surface (EGLSurface): eglCreatePbufferSurface creates an off-screen rendering area
m_eglSurface = eglCreatePbufferSurface(m_eglDisplay, m_eglConf, surfaceAttr);
if (m_eglSurface == EGL_NO_SURFACE) {
switch(eglGetError())
{
case EGL_BAD_ALLOC:
LOGI("gl-->::CreateGlesEnv Not enough resources available");
break;
case EGL_BAD_CONFIG:
LOGI("gl-->::CreateGlesEnv provided EGLConfig is invalid");
break;
case EGL_BAD_PARAMETER:
LOGI("gl-->::CreateGlesEnv provided EGL_WIDTH and EGL_HEIGHT is invalid");
break;
case EGL_BAD_MATCH:
LOGI("gl-->::CreateGlesEnv Check window and EGLConfig attributes");
break;
}
}
// create the rendering context (EGLContext)
m_eglCtx = eglCreateContext(m_eglDisplay, m_eglConf, EGL_NO_CONTEXT, ctxAttr);
if (m_eglCtx == EGL_NO_CONTEXT)
{
EGLint error = eglGetError();
if (error == EGL_BAD_CONFIG)
{
LOGI("gl-->::CreateGlesEnv EGL_BAD_CONFIG");
resultCode = -1;
break;
}
}
// make the context current
if(!eglMakeCurrent(m_eglDisplay, m_eglSurface, m_eglSurface, m_eglCtx))
{
resultCode = -1;
break;
}
LOGI("gl-->::CreateGlesEnv initialize success");
} while (false);
if(resultCode != 0)
{
LOGI("gl-->::CreateGlesEnv fail");
}
return resultCode;
}
void EGLRender::SetImageData(uint8_t *pData, int width, int height) {
if (pData && m_IsGLContextReady)
{
if (m_RenderImage.ppPlane[0])
{
NativeImageUtil::FreeNativeImage(&m_RenderImage);
m_RenderImage.ppPlane[0] = nullptr;
}
m_RenderImage.width = width;
m_RenderImage.height = height;
m_RenderImage.format = IMAGE_FORMAT_RGBA;
NativeImageUtil::AllocNativeImage(&m_RenderImage);
memcpy(m_RenderImage.ppPlane[0], pData, width * height *4);
glBindTexture(GL_TEXTURE_2D, m_ImageTextureId);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, m_RenderImage.width, m_RenderImage.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, m_RenderImage.ppPlane[0]);
glBindTexture(GL_TEXTURE_2D, GL_NONE);
if (m_FboId == GL_NONE)
{
// Create FBO
glGenFramebuffers(1, &m_FboId);
glBindFramebuffer(GL_FRAMEBUFFER, m_FboId);
glBindTexture(GL_TEXTURE_2D, m_FboTextureId);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, m_FboTextureId, 0);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, m_RenderImage.width, m_RenderImage.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, nullptr);
if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
LOGI("gl--> EGLRender ::SetImageData glCheckFramebufferStatus status != GL_FRAMEBUFFER_COMPLETE");
}
glBindTexture(GL_TEXTURE_2D, GL_NONE);
glBindFramebuffer(GL_FRAMEBUFFER, GL_NONE);
}
LOGI("gl--> :: SetImageData end");
}
}
void EGLRender::SetIntParams(int paramType, int param) {
LOGI("gl--> EGLRender::SetIntParams paramType = %{public}d,param = %{public}d", paramType, param);
switch(paramType)
{
case PARAM_TYPE_SHADER_INDEX:
{
if (param >= 0)
{
if (m_ProgramObj)
{
glDeleteProgram(m_ProgramObj);
m_ProgramObj = GL_NONE;
}
const char* vShader[1];
vShader[0] = vShaderStr;
const char* fShader[1];
switch(param)
{
case 4:
{
fShader[0] = fShaderStr3;
break;
}
case 7:
{
fShader[0] = swirlFShaderStr;
break;
}
case 0:
{
fShader[0] = brightFShaderStr;
break;
}
case 1:
{
fShader[0] = contrastFShaderStr;
break;
}
case 2:
{
fShader[0] = invertFShaderStr;
break;
}
case 3:
{
fShader[0] = pixelFShaderStr;
break;
}
case 5:
{
fShader[0] = colorMatrixFShaderStr;
break;
}
case 6:
{
fShader[0] = sketchShaderStr;
vShader[0] = v3x3ShaderStr;
break;
}
case 8:
{
fShader[0] = toonFShaderStr;
vShader[0] = v3x3ShaderStr;
break;
}
case 9:
{
fShader[0] = vignetteFShaderStr;
break;
}
case 10:
{
fShader[0] = grayScaleShaderStr;
break;
}
case 12:
{
fShader[0] = blurShaderStr;
break;
}
default:
{
LOGI("gl--> EGLRender::SetIntParams unsupported shader index = %{public}d", param);
return;
}
}
m_ProgramObj = GLUtils::CreateProgram(vShader[0], fShader[0], m_VertexShader,
m_FragmentShader);
if (!m_ProgramObj)
{
GLUtils::CheckGLError("Create Program");
LOGI("gl--> EGLRender::SetIntParams Could not create program.");
return;
}
m_SamplerLoc = glGetUniformLocation(m_ProgramObj, "s_TextureMap");
m_TexSizeLoc = glGetUniformLocation(m_ProgramObj, "u_texSize");
}
}
break;
default:
break;
}
}
void EGLRender::UseProgram() {
if (m_ProgramObj == GL_NONE) return;
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
glClear(GL_STENCIL_BUFFER_BIT | GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glViewport(0, 0, m_RenderImage.width, m_RenderImage.height);
//DO FBO off screen rendering
glUseProgram(m_ProgramObj);
glBindFramebuffer(GL_FRAMEBUFFER, m_FboId);
glBindVertexArray(m_VaoIds[0]);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, m_ImageTextureId);
glUniform1i(m_SamplerLoc, 0);
if (m_TexSizeLoc > -1) {
GLfloat size[2];
size[0] = m_RenderImage.width;
size[1] = m_RenderImage.height;
glUniform2f(m_TexSizeLoc, size[0], size[1]);
}
}
void EGLRender::GlUniform(char* strLocation, float value, int unType) {
GLint location = glGetUniformLocation(m_ProgramObj, strLocation);
switch(unType)
{
case 0:
glUniform1i(location, (int)value);
break;
case 1:
glUniform1f(location, value);
break;
}
}
void EGLRender::GlUniformArray(char* strLocation, float* value, int unType) {
GLint location = glGetUniformLocation(m_ProgramObj, strLocation);
switch(unType)
{
case 10:
GLfloat vec2[2];
vec2[0] = value[0] * m_RenderImage.width;
vec2[1] = value[1] * m_RenderImage.height;
glUniform2fv(location, 1, vec2);
break;
case 21:
glUniform2f(location, value[0], value[1]);
break;
case 1:
glUniform1fv(location, 1, value);
break;
case 2:
glUniform2fv(location, 1, value);
break;
case 3:
glUniform3fv(location, 1, value);
break;
case 4:
glUniform4fv(location, 1, value);
break;
}
}
void EGLRender::GlUniformMatrix(char* strLocation, float* value, int unType) {
GLint location = glGetUniformLocation(m_ProgramObj, strLocation);
switch(unType)
{
case 3:
glUniformMatrix3fv(location, 1, GL_FALSE, value);
break;
case 4:
glUniformMatrix4fv(location, 1, GL_FALSE, value);
break;
}
}
void EGLRender::UnInit() {
if (m_ProgramObj)
{
glDeleteProgram(m_ProgramObj);
m_ProgramObj = GL_NONE;
}
if(m_ImageTextureId)
{
glDeleteTextures(1, &m_ImageTextureId);
m_ImageTextureId = GL_NONE;
}
if (m_FboTextureId)
{
glDeleteTextures(1, &m_FboTextureId);
m_FboTextureId = GL_NONE;
}
if (m_VboIds[0])
{
glDeleteBuffers(3, m_VboIds);
m_VboIds[0] = GL_NONE;
m_VboIds[1] = GL_NONE;
m_VboIds[2] = GL_NONE;
}
if (m_VaoIds[0])
{
glDeleteVertexArrays(1, m_VaoIds);
m_VaoIds[0] = GL_NONE;
}
if (m_FboId)
{
glDeleteFramebuffers(1, &m_FboId);
m_FboId = GL_NONE;
}
if (m_IsGLContextReady)
{
DestroyGl();
m_IsGLContextReady = false;
}
}
void EGLRender::DestroyGl() {
// Release the EGL environment
if (m_eglDisplay != EGL_NO_DISPLAY) {
eglMakeCurrent(m_eglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
eglDestroyContext(m_eglDisplay, m_eglCtx);
eglDestroySurface(m_eglDisplay, m_eglSurface);
eglReleaseThread();
eglTerminate(m_eglDisplay);
}
m_eglDisplay = EGL_NO_DISPLAY;
m_eglSurface = EGL_NO_SURFACE;
m_eglCtx = EGL_NO_CONTEXT;
}

View File

@ -0,0 +1,120 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
// Created on 2022/12/20.
//
// Node APIs are not fully supported. To resolve the "interface cannot be found" compilation error,
// please include "napi/native_api.h".
#ifndef GPU_ImageETS_EGLRender_H
#define GPU_ImageETS_EGLRender_H
#include <EGL/egl.h>
#include <GLES3/gl3.h>
#include <js_native_api.h>
#include <napi/native_api.h>
#include <stdint.h>
#include "NativeImage.h"
#define EGL_FEATURE_NUM 7
class EGLRender {
public:
EGLRender();
~EGLRender();
static EGLRender* GetInstance() {
if(sInstance == nullptr) {
sInstance = new EGLRender();
}
return sInstance;
}
static void DestroyRender() {
if(sInstance) {
delete sInstance;
sInstance = nullptr;
}
}
static napi_value RenderInit(napi_env env, napi_callback_info info);
static napi_value RenderSetData(napi_env env, napi_callback_info info);
static napi_value GetPixelMapOfSurface(napi_env env, napi_callback_info info);
static napi_value RenderSetIntParams(napi_env env, napi_callback_info info);
static napi_value EGLIsInit(napi_env env, napi_callback_info info);
static napi_value DestroyGlesEnv(napi_env env, napi_callback_info info);
static napi_value StartUseProgram(napi_env env, napi_callback_info info);
static napi_value Rendering(napi_env env, napi_callback_info info);
static napi_value RenderGlUniform1i(napi_env env, napi_callback_info info);
static napi_value RenderGlUniform1f(napi_env env, napi_callback_info info);
static napi_value RenderGlUniform2fv(napi_env env, napi_callback_info info);
static napi_value setTypeArrayOfFloat(napi_env env, napi_callback_info info);
static napi_value setTypeArrayOfMatrix3f(napi_env env, napi_callback_info info);
static napi_value setTypeArrayOfMatrix4f(napi_env env, napi_callback_info info);
void Init();
int CreateGlEnv();
void SetImageData(uint8_t *pData, int width, int height);
void SetIntParams(int paramType, int param);
void UseProgram();
void Draw();
void GlUniform(char* location, float value, int unType);
void GlUniformArray(char* location, float* value, int unType);
void GlUniformMatrix(char* location, float* value, int unType);
void DestroyGl();
void UnInit();
private:
static EGLRender* sInstance;
GLuint m_ImageTextureId;
GLuint m_FboTextureId;
GLuint m_FboId;
GLuint m_VaoIds[1] = {GL_NONE};
GLuint m_VboIds[3] = {GL_NONE};
GLint m_SamplerLoc;
GLint m_TexSizeLoc;
NativeImage m_RenderImage;
GLuint m_ProgramObj;
GLuint m_VertexShader;
GLuint m_FragmentShader;
EGLConfig m_eglConf;
EGLSurface m_eglSurface;
EGLContext m_eglCtx;
EGLDisplay m_eglDisplay;
bool m_IsGLContextReady;
const char* m_fShaderStrs[EGL_FEATURE_NUM];
int m_ShaderIndex;
};
#endif //GPU_ImageETS_EGLRender_H

View File

@ -0,0 +1,16 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export const add: (a: number, b: number) => number;

View File

@ -0,0 +1,4 @@
{
"name": "libentry.so",
"types": "./index.d.ts"
}

View File

@ -0,0 +1,32 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
// Created on 2022/12/21.
//
// Node APIs are not fully supported. To resolve the "interface cannot be found" compilation error,
// please include "napi/native_api.h".
#ifndef GPU_ImageETS_DebugLog_H
#define GPU_ImageETS_DebugLog_H
#include <Hilog/log.h>
#define LOGI(...)((void)OH_LOG_Print(LOG_APP, LOG_INFO, LOG_DOMAIN, "OH_GPU_LOG", __VA_ARGS__))
#define LOGD(...)((void)OH_LOG_Print(LOG_APP, LOG_DEBUG, LOG_DOMAIN, "OH_GPU_LOG", __VA_ARGS__))
#define LOGW(...)((void)OH_LOG_Print(LOG_APP, LOG_WARN, LOG_DOMAIN, "OH_GPU_LOG", __VA_ARGS__))
#define LOGE(...)((void)OH_LOG_Print(LOG_APP, LOG_ERROR, LOG_DOMAIN, "OH_GPU_LOG", __VA_ARGS__))
#endif //GPU_ImageETS_DebugLog_H

View File

@ -0,0 +1,109 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
// Created on 2022/12/21.
//
// Node APIs are not fully supported. To resolve the "interface cannot be found" compilation error,
// please include "napi/native_api.h".
#include "GLUtils.h"
#include "DebugLog.h"
#include <malloc.h>
#include <stddef.h>
GLuint GLUtils::LoadShader(GLenum shaderType, const char *pSource)
{
GLuint shader = 0;
shader = glCreateShader(shaderType);
if (shader)
{
glShaderSource(shader, 1, &pSource, NULL);
glCompileShader(shader);
GLint compiled = 0;
glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
if (!compiled) {
GLint infoLen = 0;
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
if (infoLen)
{
char* buf = (char*) malloc((size_t)infoLen);
if (buf)
{
glGetShaderInfoLog(shader, infoLen, NULL, buf);
free(buf);
}
glDeleteShader(shader);
shader = 0;
}
}
}
return shader;
}
GLuint GLUtils::CreateProgram(const char *pVertexShaderSource, const char *pFragShaderSource, GLuint &vertexShaderHandle, GLuint &fragShaderHandle)
{
GLuint program = 0;
vertexShaderHandle = LoadShader(GL_VERTEX_SHADER, pVertexShaderSource);
if (!vertexShaderHandle) return program;
fragShaderHandle = LoadShader(GL_FRAGMENT_SHADER, pFragShaderSource);
if (!fragShaderHandle) return program;
program = glCreateProgram();
if(program)
{
glAttachShader(program, vertexShaderHandle);
CheckGLError("glAttachShader");
glAttachShader(program, fragShaderHandle);
CheckGLError("glAttachShader");
glLinkProgram(program);
GLint linkStatus = GL_FALSE;
glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
glDetachShader(program, vertexShaderHandle);
glDeleteShader(vertexShaderHandle);
vertexShaderHandle = 0;
glDetachShader(program, fragShaderHandle);
glDeleteShader(fragShaderHandle);
fragShaderHandle = 0;
if(linkStatus != GL_TRUE)
{
GLint bufLength = 0;
glGetProgramiv(program,GL_INFO_LOG_LENGTH, &bufLength);
if (bufLength)
{
char* buf = (char*) malloc((size_t)bufLength);
if (buf)
{
glGetProgramInfoLog(program, bufLength, NULL, buf);
free(buf);
}
}
glDeleteProgram(program);
program = 0;
}
}
return program;
}
void GLUtils::CheckGLError(const char *pGLOperation)
{
for(GLint error = glGetError(); error; error = glGetError())
{
LOGI("GLUtils::CheckGLError GL Operation %{public}s() glError (0x%x)\n", pGLOperation, error);
}
}

View File

@ -0,0 +1,37 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
// Created on 2022/12/21.
//
// Node APIs are not fully supported. To resolve the "interface cannot be found" compilation error,
// please include "napi/native_api.h".
#ifndef GPU_ImageETS_GLUtils_H
#define GPU_ImageETS_GLUtils_H
#include <GLES3/gl3.h>
class GLUtils {
public:
static GLuint LoadShader(GLenum shaderType, const char *pSource);
static GLuint CreateProgram(const char *pVertexShaderSource, const char *pFragShaderSource,
GLuint &vertexShaderHandle,
GLuint &fragShaderHandle);
static GLuint CreateProgram(const char *pVertexShaderSource, const char *pFragShaderSource);
static void CheckGLError(const char *pGLOperation);
};
#endif //GPU_ImageETS_GLUtils_H

View File

@ -0,0 +1,49 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
// Created on 2022/12/21.
//
// Node APIs are not fully supported. To resolve the "interface cannot be found" compilation error,
// please include "napi/native_api.h".
#include "NapiUtil.h"
#include "DebugLog.h"
#include <codecvt>
#include <cstdio>
#include <locale>
#include <string>
#include <string.h>
#include <memory>
const int32_t MAX_STR_LENGTH = 1024;
void NapiUtil::JsValueToString(const napi_env &env, const napi_value &value, const int32_t bufLen, std::string &target)
{
if (bufLen <= 0 || bufLen > MAX_STR_LENGTH) {
LOGI("%s string too long malloc failed",__func__);
return;
}
std::unique_ptr <char[]> buf = std::make_unique <char[]>(bufLen);
if (buf.get() == nullptr)
{
LOGI("%s nullptr js object to string malloc failed",__func__);
return;
}
(void) memset(buf.get(), 0, bufLen);
size_t result = 0;
napi_get_value_string_utf8(env, value, buf.get(), bufLen, &result);
target = buf.get();
}

View File

@ -0,0 +1,35 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
// Created on 2022/12/21.
//
// Node APIs are not fully supported. To resolve the "interface cannot be found" compilation error,
// please include "napi/native_api.h".
#ifndef GPU_ImageETS_NapiUtil_H
#define GPU_ImageETS_NapiUtil_H
#include <string>
#include <napi/native_api.h>
#include "native_common.h"
class NapiUtil {
public:
static void JsValueToString(const napi_env &env, const napi_value &value, const int32_t bufLen,
std::string &target);
};
#endif //GPU_ImageETS_NapiUtil_H

View File

@ -0,0 +1,162 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
// Created on 2022/12/21.
//
// Node APIs are not fully supported. To resolve the "interface cannot be found" compilation error,
// please include "napi/native_api.h".
#ifndef GPU_ImageETS_NativeImage_H
#define GPU_ImageETS_NativeImage_H
#include <stdint.h>
#include <malloc.h>
#include <js_native_api.h>
#include <js_native_api_types.h>
#include <node_api.h>
#include <unistd.h>
#include "DebugLog.h"
#include <string.h>
#define IMAGE_FORMAT_RGBA 0x01
#define IMAGE_FORMAT_NV21 0x02
#define IMAGE_FORMAT_NV12 0x03
#define IMAGE_FORMAT_I420 0x04
#define IMAGE_FORMAT_YUYV 0x05
#define IMAGE_FORMAT_GRAY 0x06
#define IMAGE_FORMAT_I444 0x07
#define IMAGE_FORMAT_P010 0x08
#define IMAGE_FORMAT_RGBA_EXT "RGB32"
#define IMAGE_FORMAT_NV21_EXT "NV21"
#define IMAGE_FORMAT_NV12_EXT "NV12"
#define IMAGE_FORMAT_I420_EXT "I420"
#define IMAGE_FORMAT_YUYV_EXT "YUYV"
#define IMAGE_FORMAT_GRAY_EXT "GRAY"
#define IMAGE_FORMAT_I444_EXT "I444"
#define IMAGE_FORMAT_P010_EXT "P010" // 16bit NV21
struct NativeImage
{
int width;
int height;
int format;
uint8_t *ppPlane[3];
NativeImage()
{
width = 0;
height = 0;
format = 0;
ppPlane[0] = nullptr;
ppPlane[1] = nullptr;
ppPlane[2] = nullptr;
}
};
class NativeImageUtil
{
public:
static void AllocNativeImage(NativeImage *pImage)
{
if (pImage ->height ==0 || pImage ->width == 0) return;
switch(pImage -> format)
{
case IMAGE_FORMAT_RGBA:
{
pImage->ppPlane[0] = static_cast<uint8_t *>(malloc(pImage->width * pImage ->height * 4));
}
break;
case IMAGE_FORMAT_YUYV:
{
pImage->ppPlane[0] = static_cast<uint8_t *>(malloc(pImage->width * pImage ->height * 2));
}
break;
case IMAGE_FORMAT_NV12:
case IMAGE_FORMAT_NV21:
{
pImage->ppPlane[0] = static_cast<uint8_t *>(malloc(pImage->width * pImage ->height * 1.5));
pImage->ppPlane[1] = pImage->ppPlane[0] + pImage->width * pImage->height;
}
break;
case IMAGE_FORMAT_I420:
{
pImage->ppPlane[0] = static_cast<uint8_t *>(malloc(pImage->width * pImage ->height * 1.5));
pImage->ppPlane[1] = pImage->ppPlane[0] + pImage->width * pImage->height;
pImage->ppPlane[2] = pImage->ppPlane[1] + pImage->width * (pImage->height >> 2);
}
break;
case IMAGE_FORMAT_GRAY:
{
pImage->ppPlane[0] = static_cast<uint8_t *>(malloc(pImage->width * pImage ->height));
}
break;
case IMAGE_FORMAT_I444:
{
pImage->ppPlane[0] = static_cast<uint8_t *>(malloc(pImage->width * pImage ->height * 3));
}
break;
case IMAGE_FORMAT_P010:
{
pImage->ppPlane[0] = static_cast<uint8_t *>(malloc(pImage->width * pImage ->height * 3));
pImage->ppPlane[1] = pImage->ppPlane[0] + pImage->width * pImage->height * 2;
}
break;
default:
break;
}
}
static void FreeNativeImage(NativeImage *pImage)
{
if (pImage == nullptr || pImage->ppPlane[0] == nullptr) return;
free(pImage->ppPlane[0]);
pImage->ppPlane[0] = nullptr;
pImage->ppPlane[1] = nullptr;
pImage->ppPlane[2] = nullptr;
}
static bool CreateArrayBuffer(napi_env env, void* src, size_t srcLen, napi_value *res)
{
if (src == nullptr || srcLen == 0)
{
return false;
}
void *nativePtr = nullptr;
if (napi_create_arraybuffer(env, srcLen, &nativePtr, res) != napi_ok || nativePtr == nullptr)
{
return false;
}
memcpy(nativePtr, src, srcLen);
return true;
}
static void flip(uint8_t** buf, int width, int height)
{
int totalLength = width * height * 4;
int oneLineLength = width * 4;
uint8_t* tmp = (uint8_t*)malloc(totalLength);
memcpy(tmp, *buf, totalLength);
memset(*buf, 0, sizeof(uint8_t)*totalLength);
for(int i = 0; i < height; i++) {
memcpy(*buf + oneLineLength * i, tmp + totalLength - oneLineLength * (i+1), oneLineLength);
}
free(tmp);
}
};
#endif //GPU_ImageETS_NativeImage_H

View File

@ -0,0 +1,51 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {GPUImageFilter} from './GPUImageFilter'
import {GPUFilterType} from '../gl/GPUFilterType'
export class GPUImage3x3TextureSamplingFilter extends GPUImageFilter{
private texelWidth:number;
private texelHeight:number;
private lineSize:number = 1.0;
constructor(){
super();
}
getFilterType():GPUFilterType{
return GPUFilterType.X3TEXTURE;
}
onInitialized(){
}
onReadySize(){
}
setLineSize(lineSize:number){
this.lineSize = lineSize;
}
setTexelWidth(texelWidth:number){
this.texelWidth = this.lineSize/texelWidth;
this.setFloat("texelWidth",this.texelWidth);
}
setTexelHeight(texelHeight:number){
this.texelHeight = this.lineSize/texelHeight;
this.setFloat("texelHeight",this.texelHeight);
}
}

View File

@ -0,0 +1,81 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {GPUImageFilter} from './GPUImageFilter'
import {GPUFilterType} from '../gl/GPUFilterType'
export class GPUImageBlurFilter extends GPUImageFilter{
private blurRadius:number;
private blurOffset:Array<number>;
private sumWeight:number;
constructor(){
super();
}
getFilterType():GPUFilterType{
return GPUFilterType.BLUR;
}
onInitialized(){
}
onReadySize(){
}
setBlurRadius(blurRadius:number){
this.blurRadius = blurRadius;
this.setInteger("blurRadius",this.blurRadius);
this.calculateSumWeight();
}
setBlurOffset(blurOffset:Array<number>){
let offset = new Array<number>(2);
if (this.width<=0||this.height<=0) {
throw new Error("the width or height must be greater than 0");
}
if (!blurOffset || blurOffset.length!==2) {
throw new Error("you should a valid value needs to be set.")
}
offset[0] = blurOffset[0]/this.width;
offset[1] = blurOffset[1] / this.height;
this.blurOffset = offset;
this.setFloat2f("blurOffset",this.blurOffset);
}
setSumWeight(sumWeight:number){
this.sumWeight = sumWeight;
this.setFloat("sumWeight", this.sumWeight);
}
private calculateSumWeight(){
if (this.blurRadius < 1) {
this.setSumWeight(0);
return;
}
let sumWeight = 0;
let sigma = this.blurRadius / 3.0;
for (let i = 0; i < this.blurRadius; i++) {
let weight = ((1.0/Math.sqrt(2.0*Math.PI*sigma*sigma))*Math.exp(-(i*i)/(2.0*sigma*sigma)));
sumWeight += weight;
if (i != 0) {
sumWeight +=weight;
}
}
this.setSumWeight(sumWeight);
}
}
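
A minimal usage sketch for this blur filter (illustrative only, not part of the commit): setImageData() has to be called before setBlurOffset(), because the offset is normalized by the filter's width and height. The input buffer and the parameter values below are placeholder assumptions.

// Hypothetical usage sketch; the input buffer and parameter values are placeholders.
import { GPUImageBlurFilter } from './GPUImageBlurFilter'

async function blurRgbaBuffer(rgba: ArrayBuffer, width: number, height: number): Promise<ArrayBuffer> {
    let filter = new GPUImageBlurFilter();
    // setImageData() first: it initializes EGL and records width/height, which setBlurOffset() divides by.
    filter.setImageData(rgba, width, height);
    filter.setBlurRadius(10);      // sample radius; also recomputes the Gaussian sum weight
    filter.setBlurOffset([2, 2]);  // offset in pixels, normalized internally to texture coordinates
    // Renders off screen and resolves with the filtered RGBA bytes.
    return await filter.getPixelMapBuf(0, 0, width, height);
}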

View File

@ -0,0 +1,44 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {GPUImageFilter} from './GPUImageFilter'
import {GPUFilterType} from '../gl/GPUFilterType'
export class GPUImageBrightnessFilter extends GPUImageFilter{
private brightness:number = 25
constructor(brightness?:number){
super()
if (brightness) {
this.brightness =brightness;
}
}
getFilterType():GPUFilterType{
return GPUFilterType.BRIGHT;
}
onInitialized(){
}
onReadySize(){
}
setBrightness(brightness:number){
this.brightness = brightness;
this.setFloat("brightness",this.brightness);
}
}

View File

@ -0,0 +1,37 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {GPUImageFilter} from './GPUImageFilter'
import {GPUFilterType} from '../gl/GPUFilterType'
export class GPUImageColorInvertFilter extends GPUImageFilter{
constructor(){
super()
}
getFilterType():GPUFilterType{
return GPUFilterType.INVERT;
}
onInitialized(){
}
onReadySize(){
}
}

View File

@ -0,0 +1,57 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {GPUImageFilter} from './GPUImageFilter'
import {GPUFilterType} from '../gl/GPUFilterType'
export class GPUImageColorMatrixFilter extends GPUImageFilter{
private intensity :number = 1.0;
private colorMatrix:Array<number> = [
1.0,0.0,0.0,0.0,
0.0,1.0,0.0,0.0,
0.0,0.0,1.0,0.0,
0.0,0.0,0.0,1.0
]
constructor(intensity?:number){
super()
if (intensity) {
this.intensity = intensity;
}
}
getFilterType():GPUFilterType{
return GPUFilterType.CONTRAST;
}
onInitialized(){
}
onReadySize(){
}
setIntensity(intensity:number){
this.intensity = intensity;
this.setFloat("intensity",this.intensity);
}
setColorMatrix(colorMatrix:Array<number>){
this.colorMatrix = colorMatrix;
this.setUniformMatrix4f("colorMatrix",this.colorMatrix);
}
}

View File

@ -0,0 +1,46 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {GPUImageFilter} from './GPUImageFilter'
import {GPUFilterType} from '../gl/GPUFilterType'
export class GPUImageContrastFilter extends GPUImageFilter{
private contrast :number = 1.0;
constructor(contrast?:number){
super()
if (contrast) {
this.contrast = contrast;
}
}
getFilterType():GPUFilterType{
return GPUFilterType.CONTRAST;
}
onInitialized(){
}
onReadySize(){
}
setContrast(contrast:number){
this.contrast = contrast;
this.setFloat("contrast",this.contrast);
}
}

View File

@ -0,0 +1,331 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {NativeEglRender} from '../gl/NativeEglRender'
import {GPUFilterType} from '../gl/GPUFilterType'
import {Runnable} from '../interface/Runnable'
import image from '@ohos.multimedia.image';
import LinkedList from '@ohos.util.LinkedList';
import ArrayList from '@ohos.util.ArrayList';
export abstract class GPUImageFilter{
private render:NativeEglRender;
private isInitialized:boolean;
private runOnDraw:LinkedList<Runnable>;
protected width:number;
protected height:number;
constructor(){
this.render = new NativeEglRender();
this.runOnDraw = new LinkedList();
}
private init(){
if (this.render) {
this.render.native_EglRenderInit();
}
this.onInitialized();
}
protected setSurfaceFilterType(){
let filter = this.getFilterType();
let filterType :number;
switch(filter){
case GPUFilterType.BRIGHT:
filterType = 0;
break;
case GPUFilterType.CONTRAST:
filterType = 1;
break;
case GPUFilterType.INVERT:
filterType = 2;
break;
case GPUFilterType.PIXELATION:
filterType = 3;
break;
case GPUFilterType.KUWAHARA:
filterType = 4;
break;
case GPUFilterType.SEPIA:
filterType = 5;
break;
case GPUFilterType.SKETCH:
filterType = 6;
break;
case GPUFilterType.SWIRL:
filterType = 7;
break;
case GPUFilterType.TOON:
filterType = 8;
break;
case GPUFilterType.VIGNETTE:
filterType = 9;
break;
case GPUFilterType.GRAYSCALE:
filterType = 10;
break;
case GPUFilterType.X3TEXTURE:
filterType = 11;
break;
case GPUFilterType.BLUR:
filterType = 12;
break;
case GPUFilterType.COLOR_M:
filterType = 100;
break;
}
if (!this.render.native_glIsInit()) {
throw new Error("the egl surface not init");
}
this.render.native_EglRenderSetIntParams(300,filterType);
}
setImageData(buf:ArrayBuffer,width:number,height:number){
if (!buf) {
throw new Error("this pixelMap data is empty");
}
if (width <= 0 || height <= 0) {
throw new Error("this pixelMap width and height is invalidation")
}
this.width = width;
this.height = height;
this.ifNeedInit();
this.onReadySize();
this.setSurfaceFilterType();
this.render.native_EglRenderSetImageData(buf,width,height);
}
protected onDraw(){
if (!this.render.native_glIsInit()) {
throw new Error("the egl surface not init")
}
this.render.native_EglUseProgram();
this.runPendingOnDrawTasks();
this.onRendering();
}
protected onRendering(){
this.render.native_EglRendering();
}
getPixelMapBuf(x:number,y:number,width:number,height:number):Promise<ArrayBuffer>{
if (x<0||y<0) {
throw new Error("the x or y should be greater than 0")
}
if (width<=0||height<=0) {
throw new Error("the width or height should be greater than 0")
}
let that = this;
return new Promise((resolve,rejects)=>{
that.onDraw();
let buf = this.render.native_EglBitmapFromGLSurface(x, y, width, height);
if (!buf) {
rejects(new Error("get pixelMap fail"))
}else{
resolve(buf);
that.destroy();
}
})
}
ifNeedInit(){
if (this.render) {
this.isInitialized = this.render.native_glIsInit();
if (!this.isInitialized) {
this.init();
}
}
}
protected runPendingOnDrawTasks(){
while(this.runOnDraw.length>0){
this.runOnDraw.removeFirst().run();
}
}
protected addRunOnDraw(runAble:Runnable){
if (!runAble) {
return;
}
this.runOnDraw.add(runAble);
}
protected setInteger(location:string,value:number){
let that = this;
let able:Runnable = {
run(){
that.ifNeedInit();
that.render.native_setInteger(location,value);
}
}
this.addRunOnDraw(able);
}
protected setFloat(location:string,value:number){
let that = this;
let able:Runnable = {
run(){
that.ifNeedInit();
that.render.native_setFloat(location,value);
}
}
this.addRunOnDraw(able);
}
protected setPoint(location:string,vf1:number,vf2:number){
let that = this;
let able:Runnable = {
run(){
that.ifNeedInit();
that.render.native_setPoint(location,vf1,vf2);
}
}
this.addRunOnDraw(able);
}
protected setFloat2f(location:string,value:Array<number>){
if (value.length !==2) {
return;
}
let that = this;
let able:Runnable = {
run(){
that.ifNeedInit();
let array = new Float32Array(2);
array[0] = value[0];
array[1] = value[1];
that.render.native_setFloat2f(location,array);
}
}
this.addRunOnDraw(able);
}
protected setFloatVec2(location:string,value:Array<number>){
if (value.length !==2) {
return;
}
let that = this;
let able:Runnable = {
run(){
that.ifNeedInit();
let array = new Float32Array(2);
array[0] = value[0];
array[1] = value[1];
that.render.native_setFloatVec2(location,array);
}
}
this.addRunOnDraw(able);
}
protected setFloatVec3(location:string,value:Array<number>){
if (value.length !==3) {
return;
}
let that = this;
let able:Runnable = {
run(){
that.ifNeedInit();
let array = new Float32Array(3);
array[0] = value[0];
array[1] = value[1];
array[2] = value[2];
that.render.native_setFloatVec3(location,array);
}
}
this.addRunOnDraw(able);
}
protected setFloatVec4(location:string,value:Array<number>){
if (value.length !==4) {
return;
}
let that = this;
let able:Runnable = {
run(){
that.ifNeedInit();
let array = new Float32Array(4);
array[0] = value[0];
array[1] = value[1];
array[2] = value[2];
array[3] = value[3];
that.render.native_setFloatVec4(location,array);
}
}
this.addRunOnDraw(able);
}
protected setUniformMatrix3f(location:string,value:Array<number>){
if (!value) {
return;
}
let that = this;
let able:Runnable = {
run(){
that.ifNeedInit();
let array = new Float32Array(value.length);
for (let i = 0; i < value.length; i++) {
array[i] = value[i];
}
that.render.native_setUniformMatrix3f(location,array);
}
}
this.addRunOnDraw(able);
}
protected setUniformMatrix4f(location:string,value:Array<number>){
if (!value) {
return;
}
let that = this;
let able:Runnable = {
run(){
that.ifNeedInit();
let array = new Float32Array(value.length);
for (let i = 0; i < value.length; i++) {
array[i] = value[i];
}
that.render.native_setUniformMatrix4f(location,array);
}
}
this.addRunOnDraw(able);
}
getFilters():ArrayList<GPUImageFilter>{
return null;
}
destroy(){
this.render.native_glIsDestroy();
this.render = null;
this.isInitialized = false;
}
abstract getFilterType():GPUFilterType;
abstract onReadySize();
abstract onInitialized();
}
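
An illustrative end-to-end sketch of how a concrete filter built on this base class is expected to be driven (not part of the commit; the @ohos.multimedia.image calls and the tightly-packed RGBA_8888 layout are my assumptions):

// Hypothetical usage sketch: convert a PixelMap to raw RGBA bytes, filter them, and build a new PixelMap.
import image from '@ohos.multimedia.image';
import { GPUImageBrightnessFilter } from './GPUImageBrightnessFilter'

async function applyBrightness(src: image.PixelMap, width: number, height: number): Promise<image.PixelMap> {
    let buf = new ArrayBuffer(width * height * 4);   // assumes tightly packed RGBA_8888
    await src.readPixelsToBuffer(buf);
    let filter = new GPUImageBrightnessFilter();
    filter.setImageData(buf, width, height);         // init EGL, select the brightness shader, upload the texture
    filter.setBrightness(25);                        // queued for the next draw; same scale as the class default
    // getPixelMapBuf() draws off screen, reads the pixels back, and destroys the filter on success (single use).
    let out = await filter.getPixelMapBuf(0, 0, width, height);
    return await image.createPixelMap(out, {
        size: { width: width, height: height },
        pixelFormat: image.PixelMapFormat.RGBA_8888
    });
}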

View File

@ -0,0 +1,38 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {GPUImageFilter} from './GPUImageFilter'
import {GPUFilterType} from '../gl/GPUFilterType'
import ArrayList from '@ohos.util.ArrayList';
export abstract class GPUImageFilterGroup extends GPUImageFilter{
private filters :ArrayList<GPUImageFilter>;
constructor(){
super()
this.filters = new ArrayList();
}
addFilter(aFilter:GPUImageFilter){
this.filters.add(aFilter);
}
getFilters():ArrayList<GPUImageFilter>{
return this.filters;
}
}

View File

@ -0,0 +1,36 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {GPUImageFilter} from './GPUImageFilter'
import {GPUFilterType} from '../gl/GPUFilterType'
export class GPUImageGrayscaleFilter extends GPUImageFilter{
constructor(){
super()
}
getFilterType():GPUFilterType{
return GPUFilterType.GRAYSCALE;
}
onInitialized(){
}
onReadySize(){
}
}

View File

@ -0,0 +1,45 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {GPUImageFilter} from './GPUImageFilter'
import {GPUFilterType} from '../gl/GPUFilterType'
export class GPUImageKuwaharaFilter extends GPUImageFilter{
private _radius :number = 25;
constructor(radius?:number){
super()
if (radius) {
this._radius= radius;
}
}
getFilterType():GPUFilterType{
return GPUFilterType.KUWAHARA;
}
onInitialized(){
}
onReadySize(){
}
setRadius(radius:number){
this._radius = radius;
this.setFloat("radius",this._radius);
}
}

View File

@ -0,0 +1,43 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {GPUImageFilter} from './GPUImageFilter'
import {GPUFilterType} from '../gl/GPUFilterType'
export class GPUImagePixelationFilter extends GPUImageFilter{
private pixel :number = 1.0;
constructor(){
super()
}
getFilterType():GPUFilterType{
return GPUFilterType.PIXELATION;
}
onInitialized(){
}
onReadySize(){
}
setPixel(pixel:number){
this.pixel =pixel;
this.setFloat("imageWidthFactor",1.0/this.width);
this.setFloat("imageHeightFactor",1.0/this.height);
this.setFloat("pixel",this.pixel);
}
}

View File

@ -0,0 +1,40 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { GPUImageColorMatrixFilter } from './GPUImageColorMatrixFilter'
import { GPUFilterType } from '../gl/GPUFilterType'
export class GPUImageSepiaToneFilter extends GPUImageColorMatrixFilter {
constructor(intensity?: number) {
super()
this.setIntensity(intensity ? intensity : 1.0);
this.setColorMatrix([
0.3588, 0.7044, 0.1368, 0.0,
0.2990, 0.5870, 0.1140, 0.0,
0.2392, 0.4696, 0.0912, 0.0,
0.0, 0.0, 0.0, 1.0
])
}
getFilterType(): GPUFilterType {
return GPUFilterType.SEPIA;
}
onReadySize() {
}
}

View File

@ -0,0 +1,37 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { GPUImage3x3TextureSamplingFilter } from './GPUImage3x3TextureSamplingFilter'
import { GPUFilterType } from '../gl/GPUFilterType'
export class GPUImageSketchFilter extends GPUImage3x3TextureSamplingFilter {
constructor() {
super()
}
getFilterType(): GPUFilterType {
return GPUFilterType.SKETCH;
}
onInitialized() {
}
onReadySize() {
this.setTexelWidth(this.width);
this.setTexelHeight(this.height);
}
}

View File

@ -0,0 +1,55 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { GPUImageFilter } from './GPUImageFilter'
import { GPUFilterType } from '../gl/GPUFilterType'
export class GPUImageSwirlFilter extends GPUImageFilter {
private _radius:number = 25;
private _angle:number = 0.9;
private _xCenter:number = 0.5;
private _yCenter:number = 0.5;
constructor() {
super()
}
getFilterType(): GPUFilterType {
return GPUFilterType.SWIRL;
}
onInitialized() {
}
onReadySize() {
}
setRadius(radius:number){
this._radius =radius;
this.setFloat("radius",this._radius);
}
setAngle(angle:number){
this._angle = angle;
this.setFloat("angle",this._angle);
}
setCenter(x_center:number,y_center:number){
this._xCenter = x_center;
this._yCenter = y_center;
this.setPoint("center",x_center,y_center);
}
}

View File

@ -0,0 +1,49 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { GPUImage3x3TextureSamplingFilter } from './GPUImage3x3TextureSamplingFilter'
import { GPUFilterType } from '../gl/GPUFilterType'
export class GPUImageToonFilter extends GPUImage3x3TextureSamplingFilter {
private threshold:number = 0.2;
private quantizationLevels:number = 10.0;
constructor() {
super()
}
getFilterType(): GPUFilterType {
return GPUFilterType.TOON;
}
onInitialized() {
}
onReadySize() {
this.setTexelWidth(this.width);
this.setTexelHeight(this.height);
}
setThreshold(threshold:number){
this.threshold = threshold;
this.setFloat("threshold",threshold);
}
setQuantizationLevels(quantizationLevels:number){
this.quantizationLevels = quantizationLevels;
this.setFloat("quantizationLevels",quantizationLevels);
}
}

View File

@ -0,0 +1,58 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { GPUImageFilter } from './GPUImageFilter'
import { GPUFilterType } from '../gl/GPUFilterType'
export class GPUImageVignetterFilter extends GPUImageFilter {
private vignetteCenter:Array<number> = [0.0,0.0];
private vignetteColor:Array<number> =[0.0,0.0,0.0];
private vignetteStart:number;
private vignetteEnd:number;
constructor() {
super()
}
getFilterType(): GPUFilterType {
return GPUFilterType.VIGNETTE;
}
onInitialized() {
}
onReadySize() {
}
setVignetteCenter(center:Array<number>){
this.vignetteCenter =center;
this.setFloatVec2("vignetteCenter",center);
}
setVignetteColor(colors:Array<number>){
this.vignetteColor = colors;
this.setFloatVec3("vignetteColor",colors);
}
setVignetteStart(start:number){
this.vignetteStart =start;
this.setFloat("vignetteStart",this.vignetteStart);
}
setVignetteEnd(end:number){
this.vignetteEnd =end;
this.setFloat("vignetteEnd",this.vignetteEnd);
}
}

View File

@ -0,0 +1,31 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export enum GPUFilterType{
BRIGHT,
CONTRAST,
INVERT,
KUWAHARA,
PIXELATION,
SEPIA,
SKETCH,
SWIRL,
TOON,
VIGNETTE,
COLOR_M,
GRAYSCALE,
X3TEXTURE,
BLUR
}

View File

@ -0,0 +1,101 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import nativeGpu from "libnativeGpu.so"
export class NativeEglRender{
static EGLTrue : number =1;
native_EglRenderInit():void{
nativeGpu.EglRenderInit();
}
native_EglRenderSetImageData(bytes:ArrayBuffer,width:number,height:number){
nativeGpu.EglRenderSetImageData(bytes,width,height);
}
native_EglRenderSetIntParams(paramType:number,param:number){
nativeGpu.EglRenderSetIntParams(paramType,param);
}
native_EglBitmapFromGLSurface(x:number,y:number,w:number,h:number):ArrayBuffer{
let num = nativeGpu.EglPixelMapSurface(x,y,w,h);
return num;
}
native_glIsInit():boolean{
let initStatus= nativeGpu.EglIsInit();
if (initStatus === NativeEglRender.EGLTrue) {
return true;
}
return false;
}
native_EglUseProgram(){
nativeGpu.EglUseProgram();
}
native_EglRendering(){
nativeGpu.EglRendering();
}
native_setInteger(key:string,value:number){
nativeGpu.EglUniform1i(key,value)
}
native_setFloat(key:string,value:number){
nativeGpu.EglUniform1f(key,value)
}
native_setPoint(key:string,vf1:number,vf2:number){
nativeGpu.EglUniform2fv(key,vf1,vf2);
}
native_setFloat2f(key:string,value:Float32Array){
this.native_setTypeArray(key,"glUniform2f",value);
}
native_setFloatVec2(key:string,value:Float32Array){
this.native_setTypeArray(key,"glUniform2fv",value);
}
native_setFloatVec3(key:string,value:Float32Array){
this.native_setTypeArray(key,"glUniform3fv",value);
}
native_setFloatVec4(key:string,value:Float32Array){
this.native_setTypeArray(key,"glUniform4fv",value);
}
native_setFloatArray(key:string,value:Float32Array){
this.native_setTypeArray(key,"glUniform1fv",value);
}
native_setUniformMatrix3f(key:string,value:Float32Array){
nativeGpu.EglSetTypeArrayOfMatrix3f(key,value);
}
native_setUniformMatrix4f(key:string,value:Float32Array){
nativeGpu.EglSetTypeArrayOfMatrix4f(key,value);
}
native_setTypeArray(key:string,uniformType:string,data:Float32Array){
nativeGpu.EglSetTypeArrayOfFloat(key,uniformType,data);
}
native_glIsDestroy(){
nativeGpu.EglDestroy();
}
}
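
For reference, a sketch of the call order this wrapper expects, mirroring what GPUImageFilter does internally (illustrative only; the buffer, the shader index 0 for brightness, and the uniform value are placeholder assumptions, and 300 is assumed to be the shader-index parameter type used by setSurfaceFilterType):

// Hypothetical low-level sketch driving the NAPI wrapper directly.
import { NativeEglRender } from './NativeEglRender'

function renderBrightnessOnce(rgba: ArrayBuffer, width: number, height: number): ArrayBuffer {
    let render = new NativeEglRender();
    render.native_EglRenderInit();                     // create the off-screen (pbuffer) EGL context
    render.native_EglRenderSetIntParams(300, 0);       // 300 = shader-index param type, 0 = brightness filter
    render.native_EglRenderSetImageData(rgba, width, height);
    render.native_EglUseProgram();                     // bind FBO, program and source texture
    render.native_setFloat("brightness", 25);          // uniforms are set after the program is in use
    render.native_EglRendering();                      // off-screen draw
    let out = render.native_EglBitmapFromGLSurface(0, 0, width, height);
    render.native_glIsDestroy();                       // release the native EGL/GL resources
    return out;
}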

View File

@ -0,0 +1,18 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export interface Runnable{
run();
}

View File

@ -0,0 +1,10 @@
{
"module": {
"name": "gpu_transform",
"type": "har",
"deviceTypes": [
"default",
"tablet"
]
}
}

View File

@ -0,0 +1,8 @@
{
"string": [
{
"name": "page_show",
"value": "page from npm package"
}
]
}

View File

@ -0,0 +1,8 @@
{
"string": [
{
"name": "page_show",
"value": "page from npm package"
}
]
}

View File

@ -0,0 +1,8 @@
{
"string": [
{
"name": "page_show",
"value": "page from npm package"
}
]
}

View File

@ -70,6 +70,9 @@ export * from './src/main/ets/components/imageknife/transform/SepiaFilterTransfo
export * from './src/main/ets/components/imageknife/transform/SketchFilterTransformation'
export * from './src/main/ets/components/imageknife/transform/MaskTransformation'
export * from './src/main/ets/components/imageknife/transform/SwirlFilterTransformation'
export * from './src/main/ets/components/imageknife/transform/KuwaharaFilterTransform'
export * from './src/main/ets/components/imageknife/transform/ToonFilterTransform'
export * from './src/main/ets/components/imageknife/transform/VignetteFilterTransform'
export * from './src/main/ets/components/imageknife/transform/TransformUtils'
export * from './src/main/ets/components/imageknife/transform/TransformType'
export * from './src/main/ets/components/imageknife/transform/pixelmap/CenterCrop'

View File

@ -12,6 +12,7 @@
},
"main": "index.ets",
"repository": "https://gitee.com/openharmony-tpc/ImageKnife",
"type": "module",
"version": "1.0.5",
"dependencies": {
"pako": "^1.0.5",
@ -19,7 +20,8 @@
"@ohos/disklrucache": "^1.0.0",
"@ohos/svg": "1.1.0",
"crc-32": "^1.2.0",
"spark-md5": "^3.0.2"
"spark-md5": "^3.0.2",
"@ohos/gpu_transform": "file:../gpu_transform"
},
"tags": [
"ImageCache",
@ -27,6 +29,5 @@
],
"license": "Apache License 2.0",
"devDependencies": {},
"name": "@ohos/imageknife",
"type": "module"
"name": "@ohos/imageknife"
}

View File

@ -13,36 +13,39 @@
* limitations under the License.
*/
import {DiskStrategy} from "../cache/diskstrategy/DiskStrategy"
import {AsyncCallback} from "../imageknife/interface/asynccallback"
import {AsyncSuccess} from "../imageknife/interface/AsyncSuccess"
import {IAllCacheInfoCallback} from "../imageknife/interface/IAllCacheInfoCallback"
import {AUTOMATIC} from "../cache/diskstrategy/enum/AUTOMATIC"
import {BaseTransform} from "../imageknife/transform/BaseTransform"
import {RotateImageTransformation} from "../imageknife/transform/RotateImageTransformation"
import {ImageKnifeData} from "../imageknife/ImageKnifeData"
import {CenterCrop} from '../imageknife/transform/pixelmap/CenterCrop'
import {CenterInside} from '../imageknife/transform/pixelmap/CenterInside'
import {FitCenter} from '../imageknife/transform/pixelmap/FitCenter'
import {RoundedCornersTransformation} from '../imageknife/transform/RoundedCornersTransformation'
import { DiskStrategy } from "../cache/diskstrategy/DiskStrategy"
import { AsyncCallback } from "../imageknife/interface/asynccallback"
import { AsyncSuccess } from "../imageknife/interface/AsyncSuccess"
import { IAllCacheInfoCallback } from "../imageknife/interface/IAllCacheInfoCallback"
import { AUTOMATIC } from "../cache/diskstrategy/enum/AUTOMATIC"
import { BaseTransform } from "../imageknife/transform/BaseTransform"
import { RotateImageTransformation } from "../imageknife/transform/RotateImageTransformation"
import { ImageKnifeData } from "../imageknife/ImageKnifeData"
import { CenterCrop } from '../imageknife/transform/pixelmap/CenterCrop'
import { CenterInside } from '../imageknife/transform/pixelmap/CenterInside'
import { FitCenter } from '../imageknife/transform/pixelmap/FitCenter'
import { RoundedCornersTransformation } from '../imageknife/transform/RoundedCornersTransformation'
import {CropCircleTransformation} from '../imageknife/transform/CropCircleTransformation'
import { CropCircleTransformation } from '../imageknife/transform/CropCircleTransformation'
import {CropCircleWithBorderTransformation} from '../imageknife/transform/CropCircleWithBorderTransformation'
import {CropSquareTransformation} from '../imageknife/transform/CropSquareTransformation'
import {CropTransformation} from '../imageknife/transform/CropTransformation'
import {CropType} from '../imageknife/transform/CropTransformation'
import {GrayscaleTransformation} from '../imageknife/transform/GrayscaleTransformation'
import {BrightnessFilterTransformation} from '../imageknife/transform/BrightnessFilterTransformation'
import {ContrastFilterTransformation} from '../imageknife/transform/ContrastFilterTransformation'
import {InvertFilterTransformation} from '../imageknife/transform/InvertFilterTransformation'
import {SepiaFilterTransformation} from '../imageknife/transform/SepiaFilterTransformation'
import {SketchFilterTransformation} from '../imageknife/transform/SketchFilterTransformation'
import {BlurTransformation} from '../imageknife/transform/BlurTransformation'
import {PixelationFilterTransformation} from '../imageknife/transform/PixelationFilterTransformation'
import {MaskTransformation} from '../imageknife/transform/MaskTransformation'
import {SwirlFilterTransformation} from '../imageknife/transform/SwirlFilterTransformation'
import {LogUtil} from '../imageknife/utils/LogUtil'
import { CropCircleWithBorderTransformation } from '../imageknife/transform/CropCircleWithBorderTransformation'
import { CropSquareTransformation } from '../imageknife/transform/CropSquareTransformation'
import { CropTransformation } from '../imageknife/transform/CropTransformation'
import { CropType } from '../imageknife/transform/CropTransformation'
import { GrayscaleTransformation } from '../imageknife/transform/GrayscaleTransformation'
import { BrightnessFilterTransformation } from '../imageknife/transform/BrightnessFilterTransformation'
import { ContrastFilterTransformation } from '../imageknife/transform/ContrastFilterTransformation'
import { InvertFilterTransformation } from '../imageknife/transform/InvertFilterTransformation'
import { SepiaFilterTransformation } from '../imageknife/transform/SepiaFilterTransformation'
import { SketchFilterTransformation } from '../imageknife/transform/SketchFilterTransformation'
import { BlurTransformation } from '../imageknife/transform/BlurTransformation'
import { PixelationFilterTransformation } from '../imageknife/transform/PixelationFilterTransformation'
import { MaskTransformation } from '../imageknife/transform/MaskTransformation'
import { SwirlFilterTransformation } from '../imageknife/transform/SwirlFilterTransformation'
import { KuwaharaFilterTransform } from '../imageknife/transform/KuwaharaFilterTransform'
import { ToonFilterTransform } from '../imageknife/transform/ToonFilterTransform'
import { VignetteFilterTransform } from '../imageknife/transform/VignetteFilterTransform'
import { LogUtil } from '../imageknife/utils/LogUtil'
export class RequestOption {
loadSrc: string | PixelMap | Resource;
@ -67,7 +70,6 @@ export class RequestOption {
retryholderSrc: PixelMap | Resource;
retryholderFunc: AsyncSuccess<ImageKnifeData>
retryholderData: ImageKnifeData
size: {
width: number,
height: number
@ -78,6 +80,8 @@ export class RequestOption {
onlyRetrieveFromCache: boolean = false;
isCacheable: boolean = true;
// Enable GPU-accelerated transform rendering
isOpenGpuTransform: boolean = false;
// Transformation settings
transformations: Array<BaseTransform<PixelMap>> = new Array();
generateCacheKey: string = "";
@ -173,10 +177,10 @@ export class RequestOption {
return this;
}
thumbnail(sizeMultiplier: number, func?: AsyncSuccess<ImageKnifeData>,displayTime?:number) {
thumbnail(sizeMultiplier: number, func?: AsyncSuccess<ImageKnifeData>, displayTime?: number) {
this.thumbSizeMultiplier = sizeMultiplier;
this.thumbHolderFunc = func;
if(displayTime){
if (displayTime) {
this.thumbDelayTime = displayTime;
}
return this;
@ -187,8 +191,6 @@ export class RequestOption {
return this;
}
addListener(func: AsyncCallback<ImageKnifeData>) {
this.requestListeners.push(func);
return this;
@ -218,111 +220,153 @@ export class RequestOption {
this.transformations.push(new CenterCrop());
return this;
}
centerInside() {
this.transformations.push(new CenterInside());
return this;
}
fitCenter() {
this.transformations.push(new FitCenter());
return this;
}
roundedCorners(obj:{ top_left: number, top_right: number, bottom_left: number, bottom_right: number }){
let transformation = new RoundedCornersTransformation({top_left: obj.top_left, top_right: obj.top_right, bottom_left: obj.bottom_left, bottom_right: obj.bottom_right})
roundedCorners(obj: {
top_left: number,
top_right: number,
bottom_left: number,
bottom_right: number
}) {
let transformation = new RoundedCornersTransformation({
top_left: obj.top_left,
top_right: obj.top_right,
bottom_left: obj.bottom_left,
bottom_right: obj.bottom_right
})
this.transformations.push(transformation);
return this;
}
cropCircle(){
cropCircle() {
let transformation = new CropCircleTransformation()
this.transformations.push(transformation);
return this;
}
cropCircleWithBorder(border:number, obj:{ r_color: number, g_color: number, b_color: number }){
let transformation = new CropCircleWithBorderTransformation(border,obj)
cropCircleWithBorder(border: number, obj: {
r_color: number,
g_color: number,
b_color: number
}) {
let transformation = new CropCircleWithBorderTransformation(border, obj)
this.transformations.push(transformation);
return this;
}
cropSquare(){
cropSquare() {
let transformation = new CropSquareTransformation()
this.transformations.push(transformation);
return this;
}
crop(width: number, height: number, cropType: CropType){
crop(width: number, height: number, cropType: CropType) {
let transformation = new CropTransformation(width, height, cropType)
this.transformations.push(transformation);
return this;
}
grayscale(){
grayscale() {
let transformation = new GrayscaleTransformation()
this.transformations.push(transformation);
return this;
}
brightnessFilter(brightness:number){
brightnessFilter(brightness: number) {
let transformation = new BrightnessFilterTransformation(brightness)
this.transformations.push(transformation);
return this;
}
contrastFilter(contrast:number){
contrastFilter(contrast: number) {
let transformation = new ContrastFilterTransformation(contrast)
this.transformations.push(transformation);
return this;
}
invertFilter(){
invertFilter() {
let transformation = new InvertFilterTransformation()
this.transformations.push(transformation);
return this;
}
sepiaFilter(){
sepiaFilter() {
let transformation = new SepiaFilterTransformation()
this.transformations.push(transformation);
return this;
}
sketchFilter(){
sketchFilter() {
let transformation = new SketchFilterTransformation()
this.transformations.push(transformation);
return this;
}
blur(radius: number){
blur(radius: number) {
let transformation = new BlurTransformation(radius)
this.transformations.push(transformation);
return this;
}
pixelationFilter(pixel: number){
pixelationFilter(pixel: number) {
let transformation = new PixelationFilterTransformation(pixel)
this.transformations.push(transformation);
return this;
}
swirlFilter(degree: number){
swirlFilter(degree: number) {
let transformation = new SwirlFilterTransformation(degree)
this.transformations.push(transformation);
return this;
}
mask(maskResource: Resource){
mask(maskResource: Resource) {
let transformation = new MaskTransformation(maskResource)
this.transformations.push(transformation);
return this;
}
transform(input:BaseTransform<PixelMap>){
kuwaharaFilter(radius: number) {
let transformation = new KuwaharaFilterTransform(radius);
this.transformations.push(transformation);
return this;
}
toonFilter(threshold: number, quantizationLevels: number) {
let transformation = new ToonFilterTransform(threshold, quantizationLevels);
this.transformations.push(transformation);
return this;
}
vignetteFilter(centerPoint: Array<number>, vignetteColor: Array<number>, vignetteSpace: Array<number>) {
let transformation = new VignetteFilterTransform(centerPoint, vignetteColor, vignetteSpace);
this.transformations.push(transformation);
return this;
}
transform(input: BaseTransform<PixelMap>) {
this.transformations.push(input);
return this;
}
transforms(inputs:BaseTransform<PixelMap>[]){
transforms(inputs: BaseTransform<PixelMap>[]) {
this.transformations = inputs;
return this;
}
// Enable GPU-accelerated transform rendering
openEfficient() {
this.isOpenGpuTransform = true;
return this;
}
// Placeholder image parsed successfully
placeholderOnComplete(imageKnifeData: ImageKnifeData) {
@ -340,8 +384,6 @@ export class RequestOption {
}
// Thumbnail parsed successfully
thumbholderOnComplete(imageKnifeData: ImageKnifeData) {
if (!this.loadMainReady && !(this.loadErrorReady || this.loadRetryReady)) {
@ -369,15 +411,15 @@ export class RequestOption {
LogUtil.log("失败占位图解析失败 error =" + error)
}
retryholderOnComplete(imageKnifeData: ImageKnifeData){
retryholderOnComplete(imageKnifeData: ImageKnifeData) {
this.retryholderData = imageKnifeData;
if(this.loadRetryReady){
if (this.loadRetryReady) {
this.retryholderFunc(imageKnifeData)
}
}
retryholderOnError(error){
LogUtil.log("重试占位图解析失败 error ="+ error)
retryholderOnError(error) {
LogUtil.log("重试占位图解析失败 error =" + error)
}
loadComplete(imageKnifeData: ImageKnifeData) {
@ -394,13 +436,13 @@ export class RequestOption {
}
loadError(err) {
LogUtil.log("loadError:"+err);
LogUtil.log("loadError stack=:"+JSON.stringify(err.stack));
LogUtil.log("loadError:" + err);
LogUtil.log("loadError stack=:" + JSON.stringify(err.stack));
// Error placeholder display rules
if (this.retryholderFunc) {
// The retry layer takes priority over the load-failure placeholder
this.loadRetryReady = true;
if(this.retryholderData != null){
if (this.retryholderData != null) {
this.retryholderFunc(this.retryholderData)
}
} else {
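A minimal usage sketch for the GPU switch and the new filter entry points added above (illustrative only; the option variable and all parameter values are assumptions, not part of this commit):

let option = new RequestOption();
option.openEfficient(); // sets isOpenGpuTransform = true so transformations take the GPU path
option.kuwaharaFilter(10) // GPU-only Kuwahara filter, radius 10
.toonFilter(0.2, 10.0) // threshold, quantization levels
.vignetteFilter([0.5, 0.5], [0.0, 0.0, 0.0], [0.3, 0.75]); // center, RGB color, vignette start/end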

View File

@ -71,7 +71,11 @@ export class BlurTransformation implements BaseTransform<PixelMap> {
}
imageSource.createPixelMap(options)
.then((data) => {
fastBlur.blur(data, this._mRadius, true, func);
if (request.isOpenGpuTransform) {
fastBlur.blurGPU(data, this._mRadius, true, func);
} else {
fastBlur.blur(data, this._mRadius, true, func);
}
})
.catch((e) => {
LogUtil.log(Constants.PROJECT_TAG + ";error:" + e);

View File

@ -17,9 +17,9 @@ import { BaseTransform } from "../transform/BaseTransform"
import { AsyncTransform } from "../transform/AsyncTransform"
import { Constants } from "../constants/Constants"
import { RequestOption } from "../../imageknife/RequestOption"
import {LogUtil} from '../../imageknife/utils/LogUtil'
import { LogUtil } from '../../imageknife/utils/LogUtil'
import image from "@ohos.multimedia.image"
import { GPUImageBrightnessFilter } from '@ohos/gpu_transform'
/**
* brightness value ranges from -1.0 to 1.0, with 0.0 as the normal level
@ -78,6 +78,18 @@ export class BrightnessFilterTransformation implements BaseTransform<PixelMap> {
let bufferData = new ArrayBuffer(data.getPixelBytesNumber());
await data.readPixelsToBuffer(bufferData);
if (request.isOpenGpuTransform) {
let filter = new GPUImageBrightnessFilter();
filter.setImageData(bufferData, targetWidth, targetHeight);
filter.setBrightness(this._mBrightness);
let buf = await filter.getPixelMapBuf(0, 0, targetWidth, targetHeight);
data.writeBufferToPixels(buf);
if (func) {
func("success", data);
}
return;
}
var dataArray = new Uint8Array(bufferData);
for (let index = 0; index < dataArray.length; index += 4) {
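The GPU branch above (and the matching branches added to the Contrast, Grayscale, Invert and Sepia transformations below) all follow one pattern: copy the PixelMap into an ArrayBuffer, hand it to the corresponding @ohos/gpu_transform filter, and write the returned buffer back. Condensed, with data standing in for the decoded PixelMap:

let bufferData = new ArrayBuffer(data.getPixelBytesNumber());
await data.readPixelsToBuffer(bufferData);
let filter = new GPUImageBrightnessFilter();
filter.setImageData(bufferData, targetWidth, targetHeight);
filter.setBrightness(this._mBrightness);
let buf = await filter.getPixelMapBuf(0, 0, targetWidth, targetHeight);
data.writeBufferToPixels(buf); // the filtered pixels replace the original content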

View File

@ -17,9 +17,9 @@ import { BaseTransform } from "../transform/BaseTransform"
import { AsyncTransform } from "../transform/AsyncTransform"
import { Constants } from "../constants/Constants"
import { RequestOption } from "../../imageknife/RequestOption"
import {LogUtil} from '../../imageknife/utils/LogUtil'
import { LogUtil } from '../../imageknife/utils/LogUtil'
import image from "@ohos.multimedia.image"
import { GPUImageContrastFilter } from '@ohos/gpu_transform'
/**
* Taking a 24-bit color image as an example, each color channel can be represented by a value in the range 0-255
@ -91,6 +91,18 @@ export class ContrastFilterTransformation implements BaseTransform<PixelMap> {
let bufferData = new ArrayBuffer(data.getPixelBytesNumber());
await data.readPixelsToBuffer(bufferData);
if (request.isOpenGpuTransform) {
let filter = new GPUImageContrastFilter();
filter.setImageData(bufferData, targetWidth, targetHeight);
filter.setContrast(this._mContrast)
let buf = await filter.getPixelMapBuf(0, 0, targetWidth, targetHeight);
data.writeBufferToPixels(buf);
if (func) {
func("success", data);
}
return;
}
var dataArray = new Uint8Array(bufferData);
let brightness = 0; // brightness offset, defaults to 0

View File

@ -20,6 +20,7 @@ import { RequestOption } from "../../imageknife/RequestOption"
import { TransformUtils } from "../transform/TransformUtils"
import {LogUtil} from '../../imageknife/utils/LogUtil'
import image from "@ohos.multimedia.image"
import { GPUImageGrayscaleFilter } from '@ohos/gpu_transform'
export class GrayscaleTransformation implements BaseTransform<PixelMap> {
getName() {
@ -70,6 +71,18 @@ export class GrayscaleTransformation implements BaseTransform<PixelMap> {
let bufferNewData = new ArrayBuffer(data.getPixelBytesNumber());
await data.readPixelsToBuffer(bufferData);
if (request.isOpenGpuTransform) {
let filter = new GPUImageGrayscaleFilter();
filter.setImageData(bufferData, targetWidth, targetHeight);
let buf = await filter.getPixelMapBuf(0, 0, targetWidth, targetHeight);
data.writeBufferToPixels(buf);
if (func) {
func("success", data);
}
return;
}
var dataArray = new Uint8Array(bufferData);
var dataNewArray = new Uint8Array(bufferNewData);

View File

@ -19,6 +19,7 @@ import { Constants } from "../constants/Constants"
import { RequestOption } from "../../imageknife/RequestOption"
import {LogUtil} from '../../imageknife/utils/LogUtil'
import image from "@ohos.multimedia.image"
import { GPUImageColorInvertFilter } from '@ohos/gpu_transform'
/**
** Image inversion is particularly useful for enhancing white or gray detail in
@ -78,6 +79,18 @@ export class InvertFilterTransformation implements BaseTransform<PixelMap> {
let bufferData = new ArrayBuffer(data.getPixelBytesNumber());
await data.readPixelsToBuffer(bufferData);
if (request.isOpenGpuTransform) {
let filter = new GPUImageColorInvertFilter();
filter.setImageData(bufferData, targetWidth, targetHeight);
let buf = await filter.getPixelMapBuf(0, 0, targetWidth, targetHeight);
data.writeBufferToPixels(buf);
if (func) {
func("success", data);
}
return;
}
var dataArray = new Uint8Array(bufferData);
for (let index = 0; index < dataArray.length; index += 4) {

View File

@ -0,0 +1,101 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { BaseTransform } from "../transform/BaseTransform"
import { AsyncTransform } from "../transform/AsyncTransform"
import { Constants } from "../constants/Constants"
import { RequestOption } from "../../imageknife/RequestOption"
import { TransformUtils } from "../transform/TransformUtils"
import image from "@ohos.multimedia.image"
import { fastBlur } from "../utils/FastBlur"
import { LogUtil } from '../../imageknife/utils/LogUtil'
import { GPUImageKuwaharaFilter } from '@ohos/gpu_transform'
export class KuwaharaFilterTransform implements BaseTransform<PixelMap> {
private _mRadius: number;
constructor(radius: number) {
this._mRadius = radius;
}
getName() {
return "KuwaharaFilterTransform _mRadius:" + this._mRadius;
}
transform(buf: ArrayBuffer, request: RequestOption, func?: AsyncTransform<PixelMap>) {
if (!buf || buf.byteLength <= 0) {
LogUtil.log(Constants.PROJECT_TAG + ";KuwaharaFilterTransform buf is empty");
if (func) {
func(Constants.PROJECT_TAG + ";KuwaharaFilterTransform buf is empty", null);
}
return;
}
if (!request.isOpenGpuTransform) {
LogUtil.error(Constants.PROJECT_TAG + ";KuwaharaFilterTransform is supported only in GPU mode");
if (func) {
func(Constants.PROJECT_TAG + ";KuwaharaFilterTransform is supported only in GPU mode", null);
}
return;
}
var that = this;
var imageSource = image.createImageSource(buf as any);
TransformUtils.getPixelMapSize(imageSource, (error, size: {
width: number,
height: number
}) => {
if (!size) {
func(error, null)
return;
}
var pixelMapWidth = size.width;
var pixelMapHeight = size.height;
var targetWidth = request.size.width;
var targetHeight = request.size.height;
if (pixelMapWidth < targetWidth) {
targetWidth = pixelMapWidth;
}
if (pixelMapHeight < targetHeight) {
targetHeight = pixelMapHeight;
}
var options = {
editable: true,
desiredSize: {
width: targetWidth,
height: targetHeight
}
}
imageSource.createPixelMap(options)
.then((data) => {
that.kuwahara(data, targetWidth, targetHeight, func);
})
.catch((e) => {
LogUtil.log(Constants.PROJECT_TAG + ";error:" + e);
func(e, null);
})
})
}
async kuwahara(bitmap: any, targetWidth: number, targetHeight: number, func: AsyncTransform<PixelMap>) {
let bufferData = new ArrayBuffer(bitmap.getPixelBytesNumber());
await bitmap.readPixelsToBuffer(bufferData);
let filter = new GPUImageKuwaharaFilter();
filter.setImageData(bufferData, targetWidth, targetHeight);
filter.setRadius(this._mRadius);
let buf = await filter.getPixelMapBuf(0, 0, targetWidth, targetHeight)
bitmap.writeBufferToPixels(buf);
if (func) {
func("success", bitmap);
}
}
}
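A hypothetical call site for the new transform (variable names are illustrative): because transform() returns early when isOpenGpuTransform is false, openEfficient() must be called on the request first.

let option = new RequestOption();
option.openEfficient(); // required: KuwaharaFilterTransform only runs on the GPU path
option.kuwaharaFilter(10); // shorthand for option.transform(new KuwaharaFilterTransform(10))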

View File

@ -18,7 +18,7 @@ import { AsyncTransform } from "../transform/AsyncTransform"
import { Constants } from "../constants/Constants"
import { RequestOption } from "../../imageknife/RequestOption"
import { TransformUtils } from "../transform/TransformUtils"
import {LogUtil} from '../../imageknife/utils/LogUtil'
import { LogUtil } from '../../imageknife/utils/LogUtil'
import image from "@ohos.multimedia.image"
import { pixelUtils } from "../utils/PixelUtils"
@ -76,7 +76,11 @@ export class PixelationFilterTransformation implements BaseTransform<PixelMap> {
}
imageSource.createPixelMap(options)
.then((data) => {
pixelUtils.pixel(data, this._mPixel, func);
if (request.isOpenGpuTransform) {
pixelUtils.pixelGPU(data, this._mPixel, func);
} else {
pixelUtils.pixel(data, this._mPixel, func);
}
})
.catch((e) => {
LogUtil.log(Constants.PROJECT_TAG + ";error:" + e);

View File

@ -17,8 +17,9 @@ import { BaseTransform } from "../transform/BaseTransform"
import { AsyncTransform } from "../transform/AsyncTransform"
import { Constants } from "../constants/Constants"
import { RequestOption } from "../../imageknife/RequestOption"
import {LogUtil} from '../../imageknife/utils/LogUtil'
import { LogUtil } from '../../imageknife/utils/LogUtil'
import image from "@ohos.multimedia.image"
import { GPUImageSepiaToneFilter } from '@ohos/gpu_transform'
/**
* Applies a simple sepia effect.
@ -72,9 +73,20 @@ export class SepiaFilterTransformation implements BaseTransform<PixelMap> {
let data = await imageSource.createPixelMap(options);
let bufferData = new ArrayBuffer(data.getPixelBytesNumber());
let bufferNewData = new ArrayBuffer(data.getPixelBytesNumber());
await data.readPixelsToBuffer(bufferData);
if (request.isOpenGpuTransform) {
let filter = new GPUImageSepiaToneFilter();
filter.setImageData(bufferData, targetWidth, targetHeight);
let buf = await filter.getPixelMapBuf(0, 0, targetWidth, targetHeight);
data.writeBufferToPixels(buf);
if (func) {
func("success", data);
}
return;
}
let bufferNewData = new ArrayBuffer(data.getPixelBytesNumber());
var dataArray = new Uint8Array(bufferData);
var dataNewArray = new Uint8Array(bufferNewData);

View File

@ -63,7 +63,11 @@ export class SketchFilterTransformation implements BaseTransform<PixelMap> {
}
imageSource.createPixelMap(options)
.then((data) => {
CalculatePixelUtils.sketch(data, func);
if (request.isOpenGpuTransform) {
CalculatePixelUtils.sketchGpu(data, func);
} else {
CalculatePixelUtils.sketch(data, func);
}
})
.catch((e) => {
func(e, null);

View File

@ -22,16 +22,27 @@ import image from '@ohos.multimedia.image'
import { PixelEntry } from '../entry/PixelEntry'
import { ColorUtils } from '../utils/ColorUtils'
import { CalculatePixelUtils } from '../utils/CalculatePixelUtils'
import { GPUImageSwirlFilter } from '@ohos/gpu_transform'
export class SwirlFilterTransformation implements BaseTransform<PixelMap> {
private _degree: number;
private radius: number = 0;
private _angle: number = 0.9;
private _xCenter: number = 0.5;
private _yCenter: number = 0.5;
constructor(degree: number) {
this._degree = degree;
constructor(radius: number, angle?: number, centerPoint?: Array<number>) {
this.radius = radius;
if (angle) {
this._angle = angle;
}
if (centerPoint && centerPoint.length === 2) {
this._xCenter = centerPoint[0];
this._yCenter = centerPoint[1];
}
}
getName() {
return 'SwirlFilterTransformation' + this._degree;
return 'SwirlFilterTransformation' + this.radius;
}
transform(buf: ArrayBuffer, request: RequestOption, func?: AsyncTransform<PixelMap>) {
@ -71,7 +82,7 @@ export class SwirlFilterTransformation implements BaseTransform<PixelMap> {
}
imageSource.createPixelMap(options)
.then((data) => {
this.swirl(data, this._degree, func);
this.swirl(data, this.radius, request, func);
})
.catch((e) => {
func(e, null);
@ -79,7 +90,7 @@ export class SwirlFilterTransformation implements BaseTransform<PixelMap> {
})
}
private async swirl(bitmap: any, degree: number, func?: AsyncTransform<PixelMap>) {
private async swirl(bitmap: any, degree: number, request: RequestOption, func?: AsyncTransform<PixelMap>) {
let imageInfo = await bitmap.getImageInfo();
let size = {
width: imageInfo.size.width,
@ -90,13 +101,28 @@ export class SwirlFilterTransformation implements BaseTransform<PixelMap> {
}
let width = size.width;
let height = size.height;
let pixEntry: Array<PixelEntry> = new Array();
let rgbData = CalculatePixelUtils.createInt2DArray(height, width);
let bufferData = new ArrayBuffer(bitmap.getPixelBytesNumber());
await bitmap.readPixelsToBuffer(bufferData);
if (request.isOpenGpuTransform) {
let filter = new GPUImageSwirlFilter();
filter.setImageData(bufferData, width, height);
filter.setRadius(degree);
filter.setAngle(this._angle)
filter.setCenter(this._xCenter, this._yCenter)
let buf = await filter.getPixelMapBuf(0, 0, width, height);
bitmap.writeBufferToPixels(buf);
if (func) {
func("success", bitmap);
}
return;
}
let pixEntry: Array<PixelEntry> = new Array();
let rgbData = CalculatePixelUtils.createInt2DArray(height, width);
let dataArray = new Uint8Array(bufferData);
let ph = 0;
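The constructor now takes a radius plus an optional angle and normalized center point instead of a single degree value. A hypothetical call with the new signature (values are examples only; option is an existing RequestOption):

let swirl = new SwirlFilterTransformation(80, 0.9, [0.5, 0.5]); // radius, angle, center
option.transform(swirl);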

View File

@ -0,0 +1,108 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { BaseTransform } from "../transform/BaseTransform"
import { AsyncTransform } from "../transform/AsyncTransform"
import { Constants } from "../constants/Constants"
import { RequestOption } from "../../imageknife/RequestOption"
import { TransformUtils } from "../transform/TransformUtils"
import image from "@ohos.multimedia.image"
import { fastBlur } from "../utils/FastBlur"
import { LogUtil } from '../../imageknife/utils/LogUtil'
import { GPUImageToonFilter } from '@ohos/gpu_transform'
export class ToonFilterTransform implements BaseTransform<PixelMap> {
private threshold: number = 0.2;
private quantizationLevels: number = 10.0;
constructor(threshold?: number, quantizationLevels?: number) {
if (threshold) {
this.threshold = threshold;
}
if (quantizationLevels) {
this.quantizationLevels = quantizationLevels;
}
}
getName() {
return "ToonFilterTransform threshold:" + this.threshold + ";quantizationLevels:" + this.quantizationLevels;
}
transform(buf: ArrayBuffer, request: RequestOption, func?: AsyncTransform<PixelMap>) {
if (!buf || buf.byteLength <= 0) {
LogUtil.log(Constants.PROJECT_TAG + ";ToonFilterTransform buf is empty");
if (func) {
func(Constants.PROJECT_TAG + ";ToonFilterTransform buf is empty", null);
}
return;
}
if (!request.isOpenGpuTransform) {
LogUtil.error(Constants.PROJECT_TAG + ";ToonFilterTransform is supported only in GPU mode");
if (func) {
func(Constants.PROJECT_TAG + ";ToonFilterTransform is supported only in GPU mode", null);
}
return;
}
var that = this;
var imageSource = image.createImageSource(buf as any);
TransformUtils.getPixelMapSize(imageSource, (error, size: {
width: number,
height: number
}) => {
if (!size) {
func(error, null)
return;
}
var pixelMapWidth = size.width;
var pixelMapHeight = size.height;
var targetWidth = request.size.width;
var targetHeight = request.size.height;
if (pixelMapWidth < targetWidth) {
targetWidth = pixelMapWidth;
}
if (pixelMapHeight < targetHeight) {
targetHeight = pixelMapHeight;
}
var options = {
editable: true,
desiredSize: {
width: targetWidth,
height: targetHeight
}
}
imageSource.createPixelMap(options)
.then((data) => {
that.toon(data, targetWidth, targetHeight, func);
})
.catch((e) => {
LogUtil.log(Constants.PROJECT_TAG + ";error:" + e);
func(e, null);
})
})
}
async toon(bitmap: any, targetWidth: number, targetHeight: number, func: AsyncTransform<PixelMap>) {
let bufferData = new ArrayBuffer(bitmap.getPixelBytesNumber());
await bitmap.readPixelsToBuffer(bufferData);
let filter = new GPUImageToonFilter();
filter.setImageData(bufferData, targetWidth, targetHeight);
filter.setThreshold(this.threshold);
filter.setQuantizationLevels(this.quantizationLevels);
let buf = await filter.getPixelMapBuf(0, 0, targetWidth, targetHeight)
bitmap.writeBufferToPixels(buf);
if (func) {
func("success", bitmap);
}
}
}
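An illustrative call site (the request variable is assumed); the ToonFilterTransform constructor arguments are optional and default to 0.2 and 10.0:

request.openEfficient(); // ToonFilterTransform only runs on the GPU path
request.toonFilter(0.2, 10.0); // threshold, quantizationLevels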

View File

@ -0,0 +1,114 @@
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { BaseTransform } from "../transform/BaseTransform"
import { AsyncTransform } from "../transform/AsyncTransform"
import { Constants } from "../constants/Constants"
import { RequestOption } from "../../imageknife/RequestOption"
import { TransformUtils } from "../transform/TransformUtils"
import image from "@ohos.multimedia.image"
import { fastBlur } from "../utils/FastBlur"
import { LogUtil } from '../../imageknife/utils/LogUtil'
import { GPUImageVignetterFilter } from '@ohos/gpu_transform'
export class VignetteFilterTransform implements BaseTransform<PixelMap> {
private centerPoint: Array<number> = [0.5, 0.5];
private vignetteColor: Array<number> = [0.0, 0.0, 0.0];
private vignetteSpace: Array<number> = [0.3, 0.75];
constructor(centerPoint: Array<number>, vignetteColor: Array<number>, vignetteSpace: Array<number>) {
if (centerPoint.length === 2) {
this.centerPoint = centerPoint;
}
if (vignetteColor.length === 3) {
this.vignetteColor = vignetteColor;
}
if (vignetteSpace.length === 2) {
this.vignetteSpace = vignetteSpace;
}
}
getName() {
return "VignetteFilterTransform centerPoint:" + this.centerPoint + ";vignetteColor:" + this.vignetteColor + ";vignetteSpace:" + this.vignetteSpace;
}
transform(buf: ArrayBuffer, request: RequestOption, func?: AsyncTransform<PixelMap>) {
if (!buf || buf.byteLength <= 0) {
LogUtil.log(Constants.PROJECT_TAG + ";VignetteFilterTransform buf is empty");
if (func) {
func(Constants.PROJECT_TAG + ";VignetteFilterTransform buf is empty", null);
}
return;
}
if (!request.isOpenGpuTransform) {
LogUtil.error(Constants.PROJECT_TAG + ";VignetteFilterTransform is supported only in GPU mode");
if (func) {
func(Constants.PROJECT_TAG + ";VignetteFilterTransform is supported only in GPU mode", null);
}
return;
}
var that = this;
var imageSource = image.createImageSource(buf as any);
TransformUtils.getPixelMapSize(imageSource, (error, size: {
width: number,
height: number
}) => {
if (!size) {
func(error, null)
return;
}
var pixelMapWidth = size.width;
var pixelMapHeight = size.height;
var targetWidth = request.size.width;
var targetHeight = request.size.height;
if (pixelMapWidth < targetWidth) {
targetWidth = pixelMapWidth;
}
if (pixelMapHeight < targetHeight) {
targetHeight = pixelMapHeight;
}
var options = {
editable: true,
desiredSize: {
width: targetWidth,
height: targetHeight
}
}
imageSource.createPixelMap(options)
.then((data) => {
that.vignette(data, targetWidth, targetHeight, func);
})
.catch((e) => {
LogUtil.log(Constants.PROJECT_TAG + ";error:" + e);
func(e, null);
})
})
}
async vignette(bitmap: any, targetWidth: number, targetHeight: number, func: AsyncTransform<PixelMap>) {
let bufferData = new ArrayBuffer(bitmap.getPixelBytesNumber());
await bitmap.readPixelsToBuffer(bufferData);
let filter = new GPUImageVignetterFilter();
filter.setImageData(bufferData, targetWidth, targetHeight);
filter.setVignetteCenter(this.centerPoint);
filter.setVignetteColor(this.vignetteColor);
filter.setVignetteStart(this.vignetteSpace[0]);
filter.setVignetteEnd(this.vignetteSpace[1]);
let buf = await filter.getPixelMapBuf(0, 0, targetWidth, targetHeight)
bitmap.writeBufferToPixels(buf);
if (func) {
func("success", bitmap);
}
}
}
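An illustrative call site using the same values the class falls back to by default (the request variable is assumed):

request.openEfficient(); // VignetteFilterTransform only runs on the GPU path
request.vignetteFilter([0.5, 0.5], [0.0, 0.0, 0.0], [0.3, 0.75]); // center, RGB color, vignette start/end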

View File

@ -12,9 +12,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {PixelEntry} from "../entry/PixelEntry"
import {AsyncTransform} from "../transform/AsyncTransform"
import {ColorUtils} from "./ColorUtils"
import { PixelEntry } from "../entry/PixelEntry"
import { AsyncTransform } from "../transform/AsyncTransform"
import { ColorUtils } from "./ColorUtils"
import { GPUImageSketchFilter } from '@ohos/gpu_transform'
export namespace CalculatePixelUtils {
export async function sketch(p: any, func: AsyncTransform<PixelMap>) {
@ -96,7 +97,7 @@ export namespace CalculatePixelUtils {
}
var gaussGray = (psrc: Array<number>, horz: number, vert: number,
width: number, height: number): number=> {
width: number, height: number): number => {
let dst, src, n_p, n_m, d_p, d_m, bd_p, bd_m, val_p, val_m, initial_p, initial_m: Array<number>;
let i, j, t, k, row, col, terms, std_dev, sp_p_idx, sp_m_idx, vp_idx, vm_idx: number;
let row_stride = width;
@ -204,8 +205,8 @@ export namespace CalculatePixelUtils {
}
var findConstants = (n_p: Array<number>, n_m: Array<number>, d_p: Array<number>,
d_m: Array<number>, bd_p: Array<number>
, bd_m: Array<number>, std_dev: number)=> {
d_m: Array<number>, bd_p: Array<number>
, bd_m: Array<number>, std_dev: number) => {
let div = Math.sqrt(2 * 3.141593) * std_dev;
let x0 = -1.783 / std_dev;
let x1 = -1.723 / std_dev;
@ -263,16 +264,16 @@ export namespace CalculatePixelUtils {
}
var transferGaussPixels = (src1: Array<number>, src2: Array<number>,
dest: Array<number>, bytes: number, width: number)=> {
dest: Array<number>, bytes: number, width: number) => {
let i, j, k, b, sum: number;
let bend = bytes * width;
i = j = k = 0;
for (b = 0; b < bend; b++) {
sum = src1[i++] + src2[j++];
if (sum > 255)
sum = 255;
sum = 255;
else if (sum < 0)
sum = 0;
sum = 0;
dest[k++] = sum;
}
}
@ -296,4 +297,30 @@ export namespace CalculatePixelUtils {
}
return array;
}
export async function sketchGpu(p: any, func: AsyncTransform<PixelMap>) {
let imageInfo = await p.getImageInfo();
let size = {
width: imageInfo.size.width,
height: imageInfo.size.height
}
if (!size) {
func(new Error("sketch The image size does not exist."), null)
return;
}
let w = size.width;
let h = size.height;
let bufferData = new ArrayBuffer(p.getPixelBytesNumber());
await p.readPixelsToBuffer(bufferData);
let filter = new GPUImageSketchFilter();
filter.setImageData(bufferData, w, h);
filter.getPixelMapBuf(0, 0, w, h).then((buf) => {
p.writeBufferToPixels(buf);
if (func) {
func("success", p);
}
})
}
}
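A hypothetical direct call of the new GPU sketch helper, assuming pixelMap is a decoded, editable PixelMap:

CalculatePixelUtils.sketchGpu(pixelMap, (err, result) => {
if (result) {
// the sketch effect has been written back into the same PixelMap
}
});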

View File

@ -17,7 +17,7 @@ import {CalculatePixelUtils} from "./CalculatePixelUtils"
import {PixelEntry} from "../entry/PixelEntry"
import {AsyncTransform} from "../transform/AsyncTransform"
import {ColorUtils} from "./ColorUtils"
import { GPUImageBlurFilter } from '@ohos/gpu_transform'
export namespace fastBlur {
@ -290,4 +290,37 @@ export namespace fastBlur {
func("success", bitmap);
}
}
export async function blurGPU(bitmap: any, radius: number, canReuseInBitmap: boolean, func: AsyncTransform<PixelMap>) {
if (radius < 1) {
func("error,radius must be greater than 1 ", null);
return;
}
let imageInfo = await bitmap.getImageInfo();
let size = {
width: imageInfo.size.width,
height: imageInfo.size.height
}
if (!size) {
func(new Error("fastBlur The image size does not exist."), null)
return;
}
let w = size.width;
let h = size.height;
let bufferData = new ArrayBuffer(bitmap.getPixelBytesNumber());
await bitmap.readPixelsToBuffer(bufferData);
let filter = new GPUImageBlurFilter();
filter.setImageData(bufferData, w, h);
filter.setBlurRadius(radius);
filter.setBlurOffset([1.0, 1.0])
filter.getPixelMapBuf(0, 0, w, h).then((buf) => {
bitmap.writeBufferToPixels(buf);
if (func) {
func("success", bitmap);
}
})
}
}
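A hypothetical direct call of blurGPU (pixelMap is assumed); a radius below 1 makes the callback fire with an error instead of a result:

fastBlur.blurGPU(pixelMap, 15, true, (err, result) => {
if (result) {
// result is the blurred PixelMap
}
});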

View File

@ -17,6 +17,7 @@ import {CalculatePixelUtils} from "./CalculatePixelUtils"
import {PixelEntry} from "../entry/PixelEntry"
import {AsyncTransform} from "../transform/AsyncTransform"
import {ColorUtils} from "./ColorUtils"
import {GPUImagePixelationFilter} from '@ohos/gpu_transform'
export namespace pixelUtils {
@ -129,4 +130,30 @@ export namespace pixelUtils {
func("success", bitmap);
}
}
export async function pixelGPU(bitmap: any, pixel: number, func: AsyncTransform<PixelMap>) {
let imageInfo = await bitmap.getImageInfo();
let size = {
width: imageInfo.size.width,
height: imageInfo.size.height
}
if (!size) {
func(new Error("GrayscaleTransformation The image size does not exist."), null)
return;
}
let w = size.width;
let h = size.height;
let bufferData = new ArrayBuffer(bitmap.getPixelBytesNumber());
await bitmap.readPixelsToBuffer(bufferData);
let filter = new GPUImagePixelationFilter();
filter.setImageData(bufferData, w, h);
filter.setPixel(pixel)
filter.getPixelMapBuf(0, 0, w, h).then((buf) => {
bitmap.writeBufferToPixels(buf);
if (func) {
func("success", bitmap);
}
})
}
}
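A hypothetical direct call of pixelGPU (pixelMap is assumed); the second argument is forwarded to GPUImagePixelationFilter.setPixel:

pixelUtils.pixelGPU(pixelMap, 20, (err, result) => {
if (result) {
// result is the pixelated PixelMap
}
});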