diff --git a/AppScope/app.json5 b/AppScope/app.json5
index 0e7a6d4..6651a56 100644
--- a/AppScope/app.json5
+++ b/AppScope/app.json5
@@ -3,7 +3,7 @@
"bundleName": "com.openharmony.imageknife",
"vendor": "example",
"versionCode": 1000000,
- "versionName": "2.1.1-rc.3",
+ "versionName": "2.1.1-rc.4",
"icon": "$media:app_icon",
"label": "$string:app_name",
"distributedNotificationEnabled": true
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 4112ba6..adb55c0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,14 @@
+## 2.1.1-rc.4
+
+- Removed the bundled pako source code and switched to the ohpm dependency
+- Removed the GIF software-decoding dependencies, including gifuct-js and jsBinarySchemaParser
+- Added a demo showing how to use ImageKnife in an HSP scenario
+- Changed ImageKnifeOption:
+  Added an optional parameter context. When ImageKnife is used inside a shared library in an HSP scenario, the context object of the current library must be passed in (for example: getContext(this).createModuleContext('library') as common.UIAbilityContext) so that Resource assets in the shared library can still be resolved later on
+- Changed RequestOption:
+  Added setModuleContext(moduleCtx: common.UIAbilityContext); in an HSP scenario this interface must be called with the correct context object to ensure resources in the HSP are accessed correctly
+  Added getModuleContext(): common.UIAbilityContext | undefined
+
## 2.1.1-rc.3
- 门面类ImageKnife新增pauseRequests接口,全局暂停请求
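The changelog entries above describe the new HSP context plumbing but do not show it in use. Below is a minimal, illustrative sketch of a page inside the shared library passing its own module context through the new optional `context` field of `ImageKnifeOption`; the struct name and the loaded resource are placeholders, while the module name `'library'` and the cast are taken from the example in the entry above. Callers working with the lower-level `RequestOption` would hand the same object to the new `setModuleContext()` instead.

```typescript
// Hypothetical page inside the HSP shared library (sketch, not the shipped demo).
import { ImageKnifeComponent, ImageKnifeOption } from '@ohos/libraryimageknife'
import common from '@ohos.app.ability.common'

@Entry
@Component
struct HspImageSketch {
  @State option: ImageKnifeOption = { loadSrc: $r('app.media.icon') }

  aboutToAppear() {
    // Pass the shared library's own context so that $r(...) resources defined
    // in the HSP can be resolved by ImageKnife (per the changelog entry above).
    let moduleCtx = getContext(this).createModuleContext('library') as common.UIAbilityContext
    this.option = {
      loadSrc: $r('app.media.icon'),
      context: moduleCtx // new optional parameter introduced in 2.1.1-rc.4
    }
  }

  build() {
    ImageKnifeComponent({ imageKnifeOption: this.option }).width(300).height(300)
  }
}
```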
diff --git a/README.OpenSource b/README.OpenSource
index 83b7fef..20e0927 100644
--- a/README.OpenSource
+++ b/README.OpenSource
@@ -66,24 +66,6 @@
"Owner" : "satazor",
"Upstream URL": "https://github.com/satazor/js-spark-md5",
"Description": "Lightning fast normal and incremental md5 for javascript"
- },
- {
- "Name": "gifuct-js",
- "License": "MIT",
- "License File": "https://github.com/matt-way/gifuct-js/blob/master/LICENSE",
- "Version Number": "1.0.0",
- "Owner" : "matt-way",
- "Upstream URL": "https://github.com/matt-way/gifuct-js",
- "Description": "Fastest javascript .GIF decoder/parser"
- },
- {
- "Name": "jsBinarySchemaParser",
- "License": "MIT",
- "License File": "https://github.com/matt-way/jsBinarySchemaParser/blob/master/LICENSE",
- "Version Number": "v1.0.1",
- "Owner" : "matt-way",
- "Upstream URL": "https://github.com/matt-way/jsBinarySchemaParser",
- "Description": "Parse binary files in javascript using clean schema objects"
}
]
\ No newline at end of file
diff --git a/README.md b/README.md
index 27f278e..35a64ad 100644
--- a/README.md
+++ b/README.md
@@ -22,26 +22,9 @@
ohpm install @ohos/imageknife
```
-## 使用说明
+## 使用说明
### 1.依赖配置
-
-在项目中entry/oh-package.json5中做如下修改,然后点击Sync Now:
-
-```json5
-{
- "name": "entry",
- "version": "1.0.0",
- "description": "Please describe the basic information.",
- "main": "",
- "author": "",
- "license": "",
- "dependencies": {
- "@ohos/imageknife": "^2.1.1-rc.3"
- }
-}
-```
-
在entry\src\main\ets\entryability\EntryAbility.ts中做如下配置初始化全局ImageKnife实例:
```typescript
@@ -104,130 +87,7 @@ svg类型图片即可。
加载GIF其实和普通流程也没有区别只要将 `loadSrc: $r('app.media.jpgSample'),` `改成一张 loadSrc: $r('app.media.gifSample'),`
GIF图片即可。
-但是解析gif图片属于耗时操作,所以我们需要将其放入子线程操作。 **这里我们需要在页面的创建和销毁上添加一个worker子线程操作。
-**
-
-```extendtypescript
-import router from '@ohos.router'
-
-import { ImageKnifeComponent, ImageKnifeOption,ImageKnife } from '@ohos/imageknife'
-import worker from '@ohos.worker';
-
-@Entry
-@Component
-struct IndexFunctionDemo {
- private globalGifWorker?:worker.ThreadWorker = undefined;
- @State imageKnifeOption1: ImageKnifeOption = {
- loadSrc: $r('app.media.icon'),
- placeholderSrc: $r('app.media.icon_loading'),
- errorholderSrc: $r('app.media.icon_failed')
- };
- @State imageKnifeOption2: ImageKnifeOption = {
- loadSrc: $r('app.media.icon'),
- placeholderSrc: $r('app.media.icon_loading'),
- errorholderSrc: $r('app.media.icon_failed')
- };
-
- build() {
- Scroll() {
- Flex({ direction: FlexDirection.Column, alignItems: ItemAlign.Center, justifyContent: FlexAlign.Center }) {
- Text("简单示例1:加载一张本地png图片").fontSize(15)
- Flex({ direction: FlexDirection.Row, alignItems: ItemAlign.Center, justifyContent: FlexAlign.Center }) {
- Button("加载PNG")
- .onClick(() => {
- this.imageKnifeOption1 = {
- loadSrc: $r('app.media.pngSample'),
-
- placeholderSrc: $r('app.media.icon_loading'),
- errorholderSrc: $r('app.media.icon_failed')
- }
- }).margin({ top: 5, left: 3 })
- ImageKnifeComponent({ imageKnifeOption: this.imageKnifeOption1 }).width(300).height(300)
- }.width('100%').backgroundColor(Color.Pink)
-
- Text("简单示例2:加载一张网络gif图片").fontSize(15)
- Text("gif解析在子线程,请在页面构建后创建worker,注入imageknife").fontSize(15)
- Flex({ direction: FlexDirection.Row, alignItems: ItemAlign.Center, justifyContent: FlexAlign.Center }) {
- Button("加载GIF")
- .onClick(() => {
- this.imageKnifeOption2 = {
- loadSrc: 'https://gd-hbimg.huaban.com/e0a25a7cab0d7c2431978726971d61720732728a315ae-57EskW_fw658',
-
- placeholderSrc: $r('app.media.icon_loading'),
- errorholderSrc: $r('app.media.icon_failed'),
- displayProgress: true,
- }
- }).margin({ top: 5, left: 3 })
- ImageKnifeComponent({ imageKnifeOption: this.imageKnifeOption2 }).width(300).height(300)
- }.width('100%').backgroundColor(Color.Pink)
-
- Flex({ direction: FlexDirection.Row, alignItems: ItemAlign.Center, justifyContent: FlexAlign.Center }) {
- Button("ImageKnife测试目录页面")
- .onClick(() => {
- console.log("pages/imageknifeTestCaseIndex 页面跳转")
- router.pushUrl({ url: "pages/imageknifeTestCaseIndex" });
- }).margin({ top: 15 })
- }.width('100%').height(60).backgroundColor(Color.Pink)
- }
- }
- .width('100%')
- .height('100%')
- }
-
- aboutToAppear() {
- this.globalGifWorker = new worker.ThreadWorker('entry/ets/workers/GifLoadWorker.ts', {
- type: 'classic',
- name: 'ImageKnifeParseGIF'
- })
- // gif解析在子线程,请在页面构建后创建worker,注入imageknife
- let imageKnife:ImageKnife|undefined = ImageKnifeGlobal.getInstance().getImageKnife()
- if(imageKnife != undefined) {
- imageKnife.setGifWorker(this.globalGifWorker)
- }
- }
-
- aboutToDisappear() {
- // 页面销毁 销毁worker
- if (this.globalGifWorker) {
- this.globalGifWorker.terminate();
- }
- }
-}
-```
-
-### 5.创建worker
-
-由于使用到了worker,目前worker创建只能在entry里面,所以我这边着重讲一下worker配置的流程。
-
-在entry目录上:点击鼠标右键 --> New --> Worker
-
-
-
-在弹出窗口中填入即将创建的worker的名称GifLoadWorker,点击弹窗右下角Finish,
-
-
-
-DevEco将自动在entry/src/main/ets/workers目录下生成GifLoadWorker.ts文件,并在其中生成模板代码
-
-
-
-同时entry模块下build-profile.json5也会自动生成新建的worker的配置信息
-
-
-
-接下来我们在GifLoadWorker.ts中导入gif处理方法
-
-```extendtypescript
-import { gifHandler } from '@ohos/imageknife/GifWorker'
-```
-同时设置
-```extendtypescript
-workerPort.onmessage = gifHandler
-```
-
-经过了上面的配置,就配置好了worker,GIF图片加载就能顺利进行了。 如果GIF加载未成功,可以检查一下worker配置是否完成,或者参考entry/src/main/ets/pages/testGifLoadWithWorkerPage.ets中的gif加载写法。
-
-### 6.自定义Key
+### 5.自定义Key
因为通常改变标识符比较困难或者根本不可能,所以ImageKnife也提供了 签名 API 来混合(你可以控制的)额外数据到你的缓存键中。
签名(signature)适用于媒体内容,也适用于你可以自行维护的一些版本元数据。
@@ -582,7 +442,6 @@ DevEco Studio 4.0 Release(4.0.3.413)--SDK( 4.0.10.3)原型机:NOH-AN00
- cropImagePage2.ets # 手势裁剪页面
- frescoImageTestCasePage.ets # 测试属性动画组件切换
- frescoRetryTestCasePage.ets # 测试ImageKnifeComponent加载失败重试
- - gifTestCasePage.ets # 测试gif解析页面
- svgTestCasePage.ets # 测试svg解析页面
- imageknifeTestCaseIndex.ets # 测试用例页面入口
- index.ets # 程序入口页面
@@ -593,7 +452,6 @@ DevEco Studio 4.0 Release(4.0.3.413)--SDK( 4.0.10.3)原型机:NOH-AN00
- storageTestDiskLruCache.ets # 磁盘缓存测试
- storageTestLruCache.ets # 内存缓存测试
- testAllCacheInfoPage.ets # 所有缓存信息获取测试
- - testGifDontAnimatePage.ets # gif加载静态图片测试
- testImageKnifeOptionChangedPage.ets # 数据切换测试
- testImageKnifeOptionChangedPage2.ets # 数据切换测试,部分变换
- testImageKnifeOptionChangedPage3.ets # 数据切换测试,组件动画
@@ -602,12 +460,12 @@ DevEco Studio 4.0 Release(4.0.3.413)--SDK( 4.0.10.3)原型机:NOH-AN00
- testPreloadPage.ets # 预加载测试
- transformPixelMapPage.ets # 所有类型变换测试
- testSingleFrameGifPage.ets # 单帧gif加载测试
- - testGifLoadWithWorkerPage.ets # gif加载有无worker的影响测试
+
- OptionTestPage.ets # 图片缓存测试
+  - testManyGifLoadWithPage.ets          # 测试gif加载页面
-workers
- - GifLoadWorkers.ts # gif子线程解析
- - MyWorker.ts # worker使用示例
- - PngLoadWorker.ts # png子线程解析
+ - upngWorkerTestCase.ets # png子线程解析
+ - upngWorkerDepend.ts # png子线程解析具体执行
```
## 贡献代码
diff --git a/build-profile.json5 b/build-profile.json5
index eca7ce7..fa5c415 100644
--- a/build-profile.json5
+++ b/build-profile.json5
@@ -8,12 +8,12 @@
"compatibleSdkVersion": 9
}
],
- "buildModeSet":[
+ "buildModeSet": [
{
- "name":"debug"
+ "name": "debug"
},
{
- "name":"release"
+ "name": "release"
}
]
},
@@ -37,6 +37,18 @@
{
"name": "gpu_transform",
"srcPath": "./gpu_transform"
+ },
+ {
+ "name": "library",
+ "srcPath": "./library",
+ "targets": [
+ {
+ "name": "default",
+ "applyToProducts": [
+ "default"
+ ]
+ }
+ ]
}
]
}
\ No newline at end of file
diff --git a/entry/build-profile.json5 b/entry/build-profile.json5
index 52767e4..e1fd25b 100644
--- a/entry/build-profile.json5
+++ b/entry/build-profile.json5
@@ -3,9 +3,7 @@
"buildOption": {
"sourceOption": {
"workers": [
- './src/main/ets/workers/GifLoadWorker.ts',
- './src/main/ets/workers/PngLoadWorker.ts',
- './src/main/ets/workers/MyWorker.ts'
+ "./src/main/ets/workers/upngWorkerTestCase.ets"
]
}
},
diff --git a/entry/oh-package.json5 b/entry/oh-package.json5
index ee278ce..ccee721 100644
--- a/entry/oh-package.json5
+++ b/entry/oh-package.json5
@@ -4,9 +4,9 @@
"name": "entry",
"description": "example description",
"repository": {},
- "version": "2.1.1-rc.3",
+ "version": "2.1.1-rc.4",
"dependencies": {
- "@ohos/imageknife": "file:../imageknife",
+ "@ohos/libraryimageknife": "file:../library",
"@ohos/disklrucache": "^2.0.2-rc.0"
}
}
diff --git a/entry/src/main/ets/entryability/CustomEngineKeyImpl.ets b/entry/src/main/ets/entryability/CustomEngineKeyImpl.ets
index ee6521b..81a6863 100644
--- a/entry/src/main/ets/entryability/CustomEngineKeyImpl.ets
+++ b/entry/src/main/ets/entryability/CustomEngineKeyImpl.ets
@@ -12,8 +12,8 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-import { EngineKeyFactories, EngineKeyInterface, RequestOption } from '@ohos/imageknife'
-import { ObjectKey } from '@ohos/imageknife';
+import { EngineKeyFactories, EngineKeyInterface, RequestOption } from '@ohos/libraryimageknife'
+import { ObjectKey } from '@ohos/libraryimageknife';
export class CustomEngineKeyImpl implements EngineKeyInterface {
redefineUrl: (loadSrc: string) => string;
diff --git a/entry/src/main/ets/entryability/EntryAbility.ets b/entry/src/main/ets/entryability/EntryAbility.ets
index c3d114b..3e816e7 100644
--- a/entry/src/main/ets/entryability/EntryAbility.ets
+++ b/entry/src/main/ets/entryability/EntryAbility.ets
@@ -15,7 +15,7 @@
import UIAbility from '@ohos.app.ability.UIAbility';
import hilog from '@ohos.hilog';
import window from '@ohos.window';
-import { ImageKnifeGlobal,ImageKnife,ImageKnifeDrawFactory,LogUtil } from '@ohos/imageknife'
+import {InitImageKnife, ImageKnifeGlobal,ImageKnife,ImageKnifeDrawFactory,LogUtil } from '@ohos/libraryimageknife'
import { CustomEngineKeyImpl } from './CustomEngineKeyImpl'
import abilityAccessCtrl,{Permissions} from '@ohos.abilityAccessCtrl';
import { BusinessError } from '@ohos.base'
@@ -38,7 +38,7 @@ export default class EntryAbility extends UIAbility {
windowStage.loadContent('pages/index', (err:BusinessError, data:void) => {
});
- ImageKnife.with(this.context);
+ InitImageKnife.init(this.context);
let imageKnife:ImageKnife|undefined = ImageKnifeGlobal.getInstance().getImageKnife()
if(imageKnife != undefined) {
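For orientation, the updated initialization in EntryAbility reads roughly as below. This is only a sketch assembled from the import and call sites visible in this hunk, with the rest of the lifecycle method abbreviated.

```typescript
// Sketch of the 2.1.1-rc.4 initialization flow (assembled from this hunk).
import UIAbility from '@ohos.app.ability.UIAbility';
import window from '@ohos.window';
import { InitImageKnife, ImageKnifeGlobal, ImageKnife } from '@ohos/libraryimageknife'
import { BusinessError } from '@ohos.base'

export default class EntryAbility extends UIAbility {
  onWindowStageCreate(windowStage: window.WindowStage) {
    windowStage.loadContent('pages/index', (err: BusinessError, data: void) => {
    });
    // InitImageKnife.init() replaces the former ImageKnife.with() call.
    InitImageKnife.init(this.context);
    let imageKnife: ImageKnife | undefined = ImageKnifeGlobal.getInstance().getImageKnife()
    if (imageKnife != undefined) {
      // further configuration (draw factory, custom engine key, log level) continues as before
    }
  }
}
```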
diff --git a/entry/src/main/ets/pages/CacheRuleChangedPage.ets b/entry/src/main/ets/pages/CacheRuleChangedPage.ets
index a5592c9..ce1ef94 100644
--- a/entry/src/main/ets/pages/CacheRuleChangedPage.ets
+++ b/entry/src/main/ets/pages/CacheRuleChangedPage.ets
@@ -17,7 +17,7 @@ import {
ImageKnifeOption,
RotateImageTransformation,
RoundedCornersTransformation
-} from '@ohos/imageknife'
+} from '@ohos/libraryimageknife'
@Entry
@Component
diff --git a/entry/src/main/ets/pages/OptionTestPage.ets b/entry/src/main/ets/pages/OptionTestPage.ets
index 1c19004..6f7f6e2 100644
--- a/entry/src/main/ets/pages/OptionTestPage.ets
+++ b/entry/src/main/ets/pages/OptionTestPage.ets
@@ -13,7 +13,7 @@
* limitations under the License.
*/
-import { ImageKnifeComponent, ImageKnifeOption,NONE,DiskStrategy } from '@ohos/imageknife'
+import { ImageKnifeComponent, ImageKnifeOption,NONE,DiskStrategy } from '@ohos/libraryimageknife'
@Entry
@Component
diff --git a/entry/src/main/ets/pages/SignatureTestPage.ets b/entry/src/main/ets/pages/SignatureTestPage.ets
index f2a2551..5d26d49 100644
--- a/entry/src/main/ets/pages/SignatureTestPage.ets
+++ b/entry/src/main/ets/pages/SignatureTestPage.ets
@@ -13,8 +13,8 @@
* limitations under the License.
*/
-import { ImageKnifeComponent, ImageKnifeOption, NONE, DiskStrategy } from '@ohos/imageknife'
-import { ObjectKey } from '@ohos/imageknife/src/main/ets/components/imageknife/ObjectKey';
+import { ImageKnifeComponent, ImageKnifeOption, NONE, DiskStrategy } from '@ohos/libraryimageknife'
+import { ObjectKey } from '@ohos/libraryimageknife';
@Entry
@Component
diff --git a/entry/src/main/ets/pages/basicTestFeatureAbilityPage.ets b/entry/src/main/ets/pages/basicTestFeatureAbilityPage.ets
index fb31384..cc787cc 100644
--- a/entry/src/main/ets/pages/basicTestFeatureAbilityPage.ets
+++ b/entry/src/main/ets/pages/basicTestFeatureAbilityPage.ets
@@ -12,7 +12,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-import {ImageKnifeOption,ImageKnifeComponent} from '@ohos/imageknife'
+import {ImageKnifeOption,ImageKnifeComponent} from '@ohos/libraryimageknife'
@Entry
@Component
diff --git a/entry/src/main/ets/pages/basicTestFileIOPage.ets b/entry/src/main/ets/pages/basicTestFileIOPage.ets
index 0a5fddd..7488325 100644
--- a/entry/src/main/ets/pages/basicTestFileIOPage.ets
+++ b/entry/src/main/ets/pages/basicTestFileIOPage.ets
@@ -13,7 +13,7 @@
* limitations under the License.
*/
-import { FileUtils, ImageKnifeGlobal} from '@ohos/imageknife'
+import { FileUtils, ImageKnifeGlobal} from '@ohos/libraryimageknife'
import resourceManager from '@ohos.resourceManager';
import { BusinessError } from '@ohos.base'
import common from '@ohos.app.ability.common';
diff --git a/entry/src/main/ets/pages/basicTestMediaImage.ets b/entry/src/main/ets/pages/basicTestMediaImage.ets
index 089eec6..e448f79 100644
--- a/entry/src/main/ets/pages/basicTestMediaImage.ets
+++ b/entry/src/main/ets/pages/basicTestMediaImage.ets
@@ -13,12 +13,12 @@
* limitations under the License.
*/
import featureAbility from '@ohos.ability.featureAbility';
-import { FileUtils } from '@ohos/imageknife'
-import { FileTypeUtil } from '@ohos/imageknife'
+import { FileUtils } from '@ohos/libraryimageknife'
+import { FileTypeUtil } from '@ohos/libraryimageknife'
import resourceManager from '@ohos.resourceManager';
-import { Base64 } from '@ohos/imageknife'
-import { ParseImageUtil } from '@ohos/imageknife'
-import { ImageKnifeGlobal } from '@ohos/imageknife'
+import { Base64 } from '@ohos/libraryimageknife'
+import { ParseImageUtil } from '@ohos/libraryimageknife'
+import { ImageKnifeGlobal } from '@ohos/libraryimageknife'
import { BusinessError } from '@ohos.base'
import common from '@ohos.app.ability.common';
@Entry
diff --git a/entry/src/main/ets/pages/basicTestResourceManagerPage.ets b/entry/src/main/ets/pages/basicTestResourceManagerPage.ets
index 824b00c..95de8fd 100644
--- a/entry/src/main/ets/pages/basicTestResourceManagerPage.ets
+++ b/entry/src/main/ets/pages/basicTestResourceManagerPage.ets
@@ -13,11 +13,11 @@
* limitations under the License.
*/
import featureAbility from '@ohos.ability.featureAbility';
-import {FileUtils} from '@ohos/imageknife'
-import {FileTypeUtil} from '@ohos/imageknife'
+import {FileUtils} from '@ohos/libraryimageknife'
+import {FileTypeUtil} from '@ohos/libraryimageknife'
import resourceManager from '@ohos.resourceManager';
-import {Base64} from '@ohos/imageknife'
-import {ImageKnifeGlobal} from '@ohos/imageknife'
+import {Base64} from '@ohos/libraryimageknife'
+import {ImageKnifeGlobal} from '@ohos/libraryimageknife'
import { BusinessError } from '@ohos.base'
import common from '@ohos.app.ability.common';
@Entry
diff --git a/entry/src/main/ets/pages/compressPage.ets b/entry/src/main/ets/pages/compressPage.ets
index b76a7ba..578840a 100644
--- a/entry/src/main/ets/pages/compressPage.ets
+++ b/entry/src/main/ets/pages/compressPage.ets
@@ -12,10 +12,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-import {ImageKnife} from '@ohos/imageknife'
-import {OnRenameListener} from '@ohos/imageknife'
-import {OnCompressListener} from '@ohos/imageknife'
-import {ImageKnifeGlobal} from '@ohos/imageknife'
+import {ImageKnife} from '@ohos/libraryimageknife'
+import {OnRenameListener} from '@ohos/libraryimageknife'
+import {OnCompressListener} from '@ohos/libraryimageknife'
+import {ImageKnifeGlobal} from '@ohos/libraryimageknife'
@Entry
@Component
diff --git a/entry/src/main/ets/pages/cropImagePage2.ets b/entry/src/main/ets/pages/cropImagePage2.ets
index fc76184..df4ad5d 100644
--- a/entry/src/main/ets/pages/cropImagePage2.ets
+++ b/entry/src/main/ets/pages/cropImagePage2.ets
@@ -13,14 +13,14 @@
* limitations under the License.
*/
-import { CropImage } from '@ohos/imageknife'
-import { CropOptions } from '@ohos/imageknife'
-import { Crop } from '@ohos/imageknife'
-import { RecourseProvider } from '@ohos/imageknife'
-import { PixelMapCrop,Options } from '@ohos/imageknife'
-import { CropCallback } from '@ohos/imageknife'
-import { FileUtils } from '@ohos/imageknife'
-import { ImageKnifeGlobal } from '@ohos/imageknife'
+import { CropImage } from '@ohos/libraryimageknife'
+import { CropOptions } from '@ohos/libraryimageknife'
+import { Crop } from '@ohos/libraryimageknife'
+import { RecourseProvider } from '@ohos/libraryimageknife'
+import { PixelMapCrop,Options } from '@ohos/libraryimageknife'
+import { CropCallback } from '@ohos/libraryimageknife'
+import { FileUtils } from '@ohos/libraryimageknife'
+import { ImageKnifeGlobal } from '@ohos/libraryimageknife'
import { BusinessError } from '@ohos.base'
import resourceManager from '@ohos.resourceManager';
import common from '@ohos.app.ability.common'
diff --git a/entry/src/main/ets/pages/dataShareUriLoadPage.ets b/entry/src/main/ets/pages/dataShareUriLoadPage.ets
index 2a22952..fd3859f 100644
--- a/entry/src/main/ets/pages/dataShareUriLoadPage.ets
+++ b/entry/src/main/ets/pages/dataShareUriLoadPage.ets
@@ -13,7 +13,7 @@
* limitations under the License.
*/
import mediaLibrary from '@ohos.multimedia.mediaLibrary';
-import { ImageKnifeComponent, ImageKnifeOption, } from '@ohos/imageknife'
+import { ImageKnifeComponent, ImageKnifeOption, } from '@ohos/libraryimageknife'
@Entry
diff --git a/entry/src/main/ets/pages/drawFactoryTestPage.ets b/entry/src/main/ets/pages/drawFactoryTestPage.ets
index 24bbb35..433475b 100644
--- a/entry/src/main/ets/pages/drawFactoryTestPage.ets
+++ b/entry/src/main/ets/pages/drawFactoryTestPage.ets
@@ -18,7 +18,7 @@ import {
ImageKnifeOption,
ImageKnifeDrawFactory,
ScaleType
-} from '@ohos/imageknife'
+} from '@ohos/libraryimageknife'
@Entry
@Component
diff --git a/entry/src/main/ets/pages/frescoRetryTestCasePage.ets b/entry/src/main/ets/pages/frescoRetryTestCasePage.ets
index b44bfba..c6ae9d5 100644
--- a/entry/src/main/ets/pages/frescoRetryTestCasePage.ets
+++ b/entry/src/main/ets/pages/frescoRetryTestCasePage.ets
@@ -12,10 +12,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-import {ImageKnifeComponent} from '@ohos/imageknife'
-import {ImageKnifeOption} from '@ohos/imageknife'
-import {RotateImageTransformation} from '@ohos/imageknife'
-import {RoundedCornersTransformation} from '@ohos/imageknife'
+import {ImageKnifeComponent} from '@ohos/libraryimageknife'
+import {ImageKnifeOption} from '@ohos/libraryimageknife'
+import {RotateImageTransformation} from '@ohos/libraryimageknife'
+import {RoundedCornersTransformation} from '@ohos/libraryimageknife'
@Entry
@Component
diff --git a/entry/src/main/ets/pages/gifTestCasePage.ets b/entry/src/main/ets/pages/gifTestCasePage.ets
deleted file mode 100644
index 771a395..0000000
--- a/entry/src/main/ets/pages/gifTestCasePage.ets
+++ /dev/null
@@ -1,141 +0,0 @@
-/*
- * Copyright (C) 2022 Huawei Device Co., Ltd.
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import {GIFParseImpl} from '@ohos/imageknife'
-import {ImageKnifeGlobal} from '@ohos/imageknife'
-import {ImageKnife} from '@ohos/imageknife'
-import worker from '@ohos.worker';
-import resourceManager from '@ohos.resourceManager';
-import { BusinessError } from '@ohos.base'
-import common from '@ohos.app.ability.common';
-@Entry
-@Component
-struct gifTestCasePage {
-
- @State pixels?:PixelMap = undefined
- private globalGifWorker?:worker.ThreadWorker = undefined;
-
- build() {
- Scroll() {
- Flex({ direction: FlexDirection.Column, alignItems: ItemAlign.Center, justifyContent: FlexAlign.Center }) {
- Flex({direction:FlexDirection.Row}){
- Button("加载gif图片")
- .onClick(()=>{
- ((ImageKnifeGlobal.getInstance().getHapContext() as common.UIAbilityContext).resourceManager as resourceManager.ResourceManager)
- .getMediaContent($r('app.media.test').id)
- .then(data => {
- console.log('basicTestFileIOPage - 本地加载资源 解析后数据data length= ' + data.byteLength)
- let gifImpl = new GIFParseImpl();
- gifImpl.parseGifs(data.buffer, (data,err)=>{
- if(err){
- console.log('加载gif图片 err='+err);
- }
- if(!!data){
- console.log('加载gif图片 suc,长度='+data.length)
- this.pixels = data[0]['drawPixelMap']
- }
- },undefined,true)
- })
- .catch((err:BusinessError) => {
- console.log('basicTestFileIOPage - 本地加载资源err' + JSON.stringify(err));
- })
-
- }).margin({left:5}).backgroundColor(Color.Blue)
- Button("加载gif图片自带worker")
- .onClick(()=>{
- ((ImageKnifeGlobal.getInstance().getHapContext() as common.UIAbilityContext).resourceManager as resourceManager.ResourceManager)
- .getMediaContent($r('app.media.gifSample_single_frame').id)
- .then(data => {
- console.log('basicTestFileIOPage - 本地加载资源 解析后数据data length = ' + data.byteLength)
- let local_worker = new worker.ThreadWorker('entry/ets/workers/GifLoadWorker.ts', {
- type: 'classic',
- name: 'ImageKnifeParseGIF'
- })
- let gifImpl = new GIFParseImpl();
- gifImpl.parseGifs(data.buffer, (data,err)=>{
- if(err){
- console.log('加载gif图片自带worker err='+err);
- }
- if(!!data){
- console.log('加载gif图片自带worker suc,长度='+data.length)
- this.pixels = data[0]['drawPixelMap']
- }
- },local_worker)
- })
- .catch((err:BusinessError) => {
- console.log('basicTestFileIOPage - 本地加载资源err' + JSON.stringify(err));
- })
-
- }).margin({left:5}).backgroundColor(Color.Blue)
- Button("加载gif图片全局配置worker")
- .onClick(()=>{
- ((ImageKnifeGlobal.getInstance().getHapContext() as common.UIAbilityContext).resourceManager as resourceManager.ResourceManager)
- .getMediaContent($r('app.media.test').id)
- .then(data => {
- console.log('basicTestFileIOPage - 本地加载资源 解析后数据data length = ' + data.byteLength)
-
- let gifImpl = new GIFParseImpl();
- gifImpl.parseGifs(data.buffer, (data,err)=>{
- if(err){
- console.log('加载gif图片自带worker err='+err);
- }
- if(!!data){
- console.log('加载gif图片自带worker suc,长度='+data.length)
- this.pixels = data[0]['drawPixelMap']
- }
- })
- })
- .catch((err:BusinessError) => {
- console.log('basicTestFileIOPage - 本地加载资源err' + JSON.stringify(err));
- })
-
- }).margin({left:5}).backgroundColor(Color.Blue)
- }
- .margin({top:15})
-
- Text("下面为展示图片区域").margin({top:5})
- Flex({direction: FlexDirection.Column, alignItems: ItemAlign.Center, justifyContent: FlexAlign.Center }){
- Image(this.pixels == undefined?'':this.pixels!)
- .width(400)
- .height(400)
- .backgroundColor(Color.Pink)
- }.width(400).height(400).margin({top:10}).backgroundColor(Color.Pink)
- }
- }
- .width('100%')
- .height('100%')
- }
-
- aboutToAppear() {
- console.log("aboutToAppear()")
- this.globalGifWorker = new worker.ThreadWorker('entry/ets/workers/GifLoadWorker.ts', {
- type: 'classic',
- name: 'ImageKnifeParseGIF'
- })
- let imageKnife:ImageKnife|undefined = ImageKnifeGlobal.getInstance().getImageKnife()
- if(imageKnife != undefined) {
- (ImageKnifeGlobal.getInstance().getImageKnife())?.setGifWorker(this.globalGifWorker)
- }
- }
- aboutToDisappear(){
- if(this.globalGifWorker){
- this.globalGifWorker.terminate();
- }
- }
- typedArrayToBuffer(array: Uint8Array): ArrayBuffer {
- return array.buffer.slice(array.byteOffset, array.byteLength + array.byteOffset)
- }
-}
-
-
diff --git a/entry/src/main/ets/pages/hspCacheTestPage.ets b/entry/src/main/ets/pages/hspCacheTestPage.ets
new file mode 100644
index 0000000..1d3c2ba
--- /dev/null
+++ b/entry/src/main/ets/pages/hspCacheTestPage.ets
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2023 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import { ImageKnifeComponent, ImageKnifeOption, FileUtils,ImageKnifeGlobal } from '@ohos/libraryimageknife'
+import common from '@ohos.app.ability.common'
+
+@Entry
+@Component
+struct Index {
+
+
+ @State imageOption1:ImageKnifeOption = {
+ loadSrc: $r('app.media.icon'),
+
+ }
+ @State imageOption2:ImageKnifeOption = {
+ loadSrc: $r('app.media.icon'),
+
+ }
+ @State imageOption3:ImageKnifeOption = {
+ loadSrc: $r('app.media.icon'),
+
+ }
+
+ build() {
+ Scroll() {
+ Column() {
+ Button('点击加载网络图片').onClick(()=>{
+ this.imageOption2 = {
+ loadSrc: 'https://hbimg.huabanimg.com/cc6af25f8d782d3cf3122bef4e61571378271145735e9-vEVggB',
+ }
+ })
+ ImageKnifeComponent({imageKnifeOption:this.imageOption2}).width(300).height(300).backgroundColor(Color.Pink)
+
+ Button('点击加载本地文件').onClick(()=>{
+ let ctx:Object|undefined = ImageKnifeGlobal.getInstance().getHapContext();
+ if(ctx != undefined){
+ let path = (ctx as common.UIAbilityContext).filesDir+"/set.jpeg";
+ FileUtils.getInstance().readFilePicAsync(path).then(buffer=>{
+ this.imageOption3 = {
+ loadSrc: path
+ }
+ })
+ }
+
+ })
+ ImageKnifeComponent({imageKnifeOption:this.imageOption3}).width(300).height(300).backgroundColor(Color.Pink)
+
+
+
+ }.width('100%')
+
+ }
+ .width('100%')
+ .height('100%')
+ }
+}
\ No newline at end of file
diff --git a/entry/src/main/ets/pages/imageknifeTestCaseIndex.ets b/entry/src/main/ets/pages/imageknifeTestCaseIndex.ets
index ac79068..dc48f66 100644
--- a/entry/src/main/ets/pages/imageknifeTestCaseIndex.ets
+++ b/entry/src/main/ets/pages/imageknifeTestCaseIndex.ets
@@ -106,11 +106,7 @@ struct IndexFunctionDemo {
console.log("测试预加载")
router.pushUrl({ url: "pages/testPreloadPage" });
}).margin({ top: 5, left: 3 })
- Button("测试gif静态动态切换")
- .onClick(() => {
- console.log("测试gif静态动态切换")
- router.pushUrl({ url: "pages/testGifDontAnimatePage" });
- }).margin({ top: 5, left: 3 })
+
}.width('100%').height(60).backgroundColor(Color.Pink)
Text("测试图片变换 图片压缩 功能点").fontSize(15)
@@ -184,10 +180,9 @@ struct IndexFunctionDemo {
router.pushUrl({ url: "pages/svgTestCasePage" });
}).margin({ top: 15 })
- Button("测试gif")
+ Button("测试gif已转移,不再使用worker")
.onClick(() => {
- console.log("pages/gifTestCasePage 页面跳转")
- router.pushUrl({ url: "pages/gifTestCasePage" });
+
}).margin({ top: 15 })
}.width('100%').height(60).backgroundColor(Color.Pink)
Flex({ direction: FlexDirection.Row, alignItems: ItemAlign.Center, justifyContent: FlexAlign.Center }) {
@@ -248,10 +243,28 @@ struct IndexFunctionDemo {
Text("worker测试").fontSize(15)
Flex({ direction: FlexDirection.Row, alignItems: ItemAlign.Center, justifyContent: FlexAlign.Center }) {
- Button("gif加载有无worker的区别测试")
+ Button("gif加载测试已转移,worker不再使用")
.onClick(() => {
- router.pushUrl({ url: "pages/testGifLoadWithWorkerPage" });
+
}).margin({ top: 5, left: 3 })
+ }.width('100%').height(60).backgroundColor(Color.Pink)
+ Text("HSP相关测试").fontSize(15)
+ Flex({ direction: FlexDirection.Row, alignItems: ItemAlign.Center, justifyContent: FlexAlign.Center }) {
+
+ Button("进入HSP的library共享包")
+ .onClick(() => {
+ router.pushUrl({url:'@bundle:com.openharmony.imageknife/library/ets/pages/Index'})
+ .then(()=>{
+              console.log('push page success')
+ })
+ }).margin({ top: 15 })
+ Button("hsp加载后缓存情况,先点左侧按钮")
+ .onClick(() => {
+            console.log("pages/hspCacheTestPage 页面跳转")
+ router.pushUrl({ url: "pages/hspCacheTestPage" });
+ }).margin({ top: 15 })
+
+
}.width('100%').height(60).backgroundColor(Color.Pink)
Text("测试图片加载稳定").fontSize(15)
Flex({ direction: FlexDirection.Row, alignItems: ItemAlign.Center, justifyContent: FlexAlign.Center }) {
diff --git a/entry/src/main/ets/pages/index.ets b/entry/src/main/ets/pages/index.ets
index 7d6dd99..1c1459d 100644
--- a/entry/src/main/ets/pages/index.ets
+++ b/entry/src/main/ets/pages/index.ets
@@ -18,14 +18,14 @@ import {
ImageKnifeOption,
ImageKnifeGlobal,
ImageKnife
-} from '@ohos/imageknife'
-import worker from '@ohos.worker';
-import { ObjectKey } from '@ohos/imageknife/src/main/ets/components/imageknife/ObjectKey';
+} from '@ohos/libraryimageknife'
+
+import { ObjectKey } from '@ohos/libraryimageknife';
@Entry
@Component
struct IndexFunctionDemo {
- private globalGifWorker?:worker.ThreadWorker = undefined
+
@State imageKnifeOption1: ImageKnifeOption =
{
loadSrc: $r('app.media.icon'),
@@ -90,20 +90,10 @@ struct IndexFunctionDemo {
}
aboutToAppear() {
- this.globalGifWorker = new worker.ThreadWorker('entry/ets/workers/GifLoadWorker.ts', {
- type: 'classic',
- name: 'ImageKnifeParseGIF'
- })
- // gif解析在子线程,请在页面构建后创建worker,注入imageknife
- let imageKnife:ImageKnife|undefined = ImageKnifeGlobal.getInstance().getImageKnife()
- if(imageKnife != undefined) {
- imageKnife?.setGifWorker(this.globalGifWorker)
- }
+
}
aboutToDisappear(){
- if(this.globalGifWorker){
- this.globalGifWorker.terminate();
- }
+
}
onBackPress() {
diff --git a/entry/src/main/ets/pages/manyPhotoShowPage.ets b/entry/src/main/ets/pages/manyPhotoShowPage.ets
index 570a80f..dffbb0a 100644
--- a/entry/src/main/ets/pages/manyPhotoShowPage.ets
+++ b/entry/src/main/ets/pages/manyPhotoShowPage.ets
@@ -12,10 +12,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-import {ImageKnifeComponent, ScaleType} from '@ohos/imageknife'
-import {ImageKnifeOption} from '@ohos/imageknife'
-import {ImageKnifeGlobal} from '@ohos/imageknife'
-import {RotateImageTransformation} from '@ohos/imageknife'
+import {ImageKnifeComponent, ScaleType} from '@ohos/libraryimageknife'
+import {ImageKnifeOption} from '@ohos/libraryimageknife'
+import {ImageKnifeGlobal} from '@ohos/libraryimageknife'
+import {RotateImageTransformation} from '@ohos/libraryimageknife'
import {Material} from './model/Material'
import {TestDataSource} from './model/TestDataSource'
import {DiskLruCache} from '@ohos/disklrucache'
diff --git a/entry/src/main/ets/pages/photosPausedResumedPage.ets b/entry/src/main/ets/pages/photosPausedResumedPage.ets
index 9379057..4105d79 100644
--- a/entry/src/main/ets/pages/photosPausedResumedPage.ets
+++ b/entry/src/main/ets/pages/photosPausedResumedPage.ets
@@ -12,10 +12,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-import {ImageKnifeComponent, ScaleType} from '@ohos/imageknife'
-import {ImageKnifeOption} from '@ohos/imageknife'
-import {ImageKnifeGlobal} from '@ohos/imageknife'
-import {RotateImageTransformation} from '@ohos/imageknife'
+import {ImageKnifeComponent, ScaleType,ImageKnife} from '@ohos/libraryimageknife'
+import {ImageKnifeOption} from '@ohos/libraryimageknife'
+import {ImageKnifeGlobal} from '@ohos/libraryimageknife'
+import {RotateImageTransformation} from '@ohos/libraryimageknife'
import {Material} from './model/Material'
import {TestDataSource} from './model/TestDataSource'
import {DiskLruCache} from '@ohos/disklrucache'
@@ -34,7 +34,7 @@ struct photosPausedResumedPage {
Button('点击暂停加载')
.margin({top:10,bottom:5})
.onClick(()=>{
- let imageKnife = ImageKnifeGlobal.getInstance().getImageKnife();
+ let imageKnife:ImageKnife|undefined = ImageKnifeGlobal.getInstance().getImageKnife();
if(imageKnife!= undefined){
imageKnife.pauseRequests()
}
@@ -43,7 +43,7 @@ struct photosPausedResumedPage {
Button('点击重新加载')
.margin({top:10,bottom:5})
.onClick(()=>{
- let imageKnife = ImageKnifeGlobal.getInstance().getImageKnife();
+ let imageKnife:ImageKnife|undefined = ImageKnifeGlobal.getInstance().getImageKnife();
if(imageKnife!= undefined){
imageKnife.resumeRequests()
}
diff --git a/entry/src/main/ets/pages/pngjTestCasePage.ets b/entry/src/main/ets/pages/pngjTestCasePage.ets
index 15ec0fe..a7ff0e1 100644
--- a/entry/src/main/ets/pages/pngjTestCasePage.ets
+++ b/entry/src/main/ets/pages/pngjTestCasePage.ets
@@ -13,9 +13,9 @@
* limitations under the License.
*/
import router from '@system.router';
-import { Pngj } from '@ohos/imageknife'
+import { Pngj } from '@ohos/libraryimageknife'
import resourceManager from '@ohos.resourceManager';
-import { FileUtils,ImageKnifeGlobal } from '@ohos/imageknife'
+import { FileUtils,ImageKnifeGlobal } from '@ohos/libraryimageknife'
import featureability from '@ohos.ability.featureAbility'
import ArkWorker from '@ohos.worker'
import worker from '@ohos.worker';
@@ -115,7 +115,7 @@ struct PngjTestCasePage {
if (!this.pngdecodeRun2) {
this.pngdecodeRun2 = true;
let pngj = new Pngj();
- let png_worker = new worker.ThreadWorker('entry/ets/workers/PngLoadWorker.ts', {
+ let png_worker = new worker.ThreadWorker('entry/ets/workers/upngWorkerTestCase.ets', {
type: 'classic',
name: 'readPngImageAsync'
})
@@ -161,7 +161,7 @@ struct PngjTestCasePage {
if (!this.pngdecodeRun3) {
this.pngdecodeRun3 = true;
let pngj = new Pngj();
- let png_worker = new worker.ThreadWorker('entry/ets/workers/PngLoadWorker.ts', {
+ let png_worker = new worker.ThreadWorker('entry/ets/workers/upngWorkerTestCase.ets', {
type: 'classic',
name: 'writePngWithStringAsync'
})
@@ -210,7 +210,7 @@ struct PngjTestCasePage {
if (!this.pngdecodeRun4) {
this.pngdecodeRun4 = true;
let pngj = new Pngj();
- let png_worker = new worker.ThreadWorker('entry/ets/workers/PngLoadWorker.ts', {
+ let png_worker = new worker.ThreadWorker('entry/ets/workers/upngWorkerTestCase.ets', {
type: 'classic',
name: 'writePngAsync'
})
diff --git a/entry/src/main/ets/pages/showErrorholderTestCasePage.ets b/entry/src/main/ets/pages/showErrorholderTestCasePage.ets
index ad82adf..15ea013 100644
--- a/entry/src/main/ets/pages/showErrorholderTestCasePage.ets
+++ b/entry/src/main/ets/pages/showErrorholderTestCasePage.ets
@@ -12,10 +12,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-import {ImageKnifeComponent} from '@ohos/imageknife'
-import {ImageKnifeOption} from '@ohos/imageknife'
-import {RotateImageTransformation} from '@ohos/imageknife'
-import {RoundedCornersTransformation} from '@ohos/imageknife'
+import {ImageKnifeComponent} from '@ohos/libraryimageknife'
+import {ImageKnifeOption} from '@ohos/libraryimageknife'
+import {RotateImageTransformation} from '@ohos/libraryimageknife'
+import {RoundedCornersTransformation} from '@ohos/libraryimageknife'
@Entry
@Component
diff --git a/entry/src/main/ets/pages/storageTestLruCache.ets b/entry/src/main/ets/pages/storageTestLruCache.ets
index abe8e07..018c002 100644
--- a/entry/src/main/ets/pages/storageTestLruCache.ets
+++ b/entry/src/main/ets/pages/storageTestLruCache.ets
@@ -13,7 +13,7 @@
* limitations under the License.
*/
-import {LruCache} from '@ohos/imageknife'
+import {LruCache} from '@ohos/libraryimageknife'
function getRandomInt(min:number, max:number):number {
min = Math.ceil(min);
diff --git a/entry/src/main/ets/pages/svgTestCasePage.ets b/entry/src/main/ets/pages/svgTestCasePage.ets
index 6157a96..edcd299 100644
--- a/entry/src/main/ets/pages/svgTestCasePage.ets
+++ b/entry/src/main/ets/pages/svgTestCasePage.ets
@@ -12,8 +12,8 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-import {SVGParseImpl} from '@ohos/imageknife'
-import {ImageKnifeGlobal} from '@ohos/imageknife'
+import {SVGParseImpl} from '@ohos/libraryimageknife'
+import {ImageKnifeGlobal} from '@ohos/libraryimageknife'
import resourceManager from '@ohos.resourceManager';
import {BusinessError} from '@ohos.base'
import common from '@ohos.app.ability.common';
diff --git a/entry/src/main/ets/pages/tempUrlTestPage.ets b/entry/src/main/ets/pages/tempUrlTestPage.ets
index 3353fdc..9261dbe 100644
--- a/entry/src/main/ets/pages/tempUrlTestPage.ets
+++ b/entry/src/main/ets/pages/tempUrlTestPage.ets
@@ -20,12 +20,12 @@ import {
ImageKnife,
ImageKnifeDrawFactory,
ScaleType
-} from '@ohos/imageknife'
+} from '@ohos/libraryimageknife'
import worker from '@ohos.worker';
@Entry
@Component
struct tempUrlTestPage {
- private globalGifWorker?:worker.ThreadWorker = undefined
+
@State imageKnifeOption1: ImageKnifeOption =
{
loadSrc: $r('app.media.icon'),
@@ -86,20 +86,10 @@ struct tempUrlTestPage {
}
aboutToAppear() {
- this.globalGifWorker = new worker.ThreadWorker('entry/ets/workers/GifLoadWorker.ts', {
- type: 'classic',
- name: 'ImageKnifeParseGIF'
- })
- let imageKnife:ImageKnife|undefined = ImageKnifeGlobal.getInstance().getImageKnife()
- if(imageKnife != undefined) {
- // gif解析在子线程,请在页面构建后创建worker,注入imageknife
- imageKnife.setGifWorker(this.globalGifWorker)
- }
+
}
aboutToDisappear(){
- if(this.globalGifWorker){
- this.globalGifWorker.terminate();
- }
+
}
onBackPress() {
diff --git a/entry/src/main/ets/pages/testAllCacheInfoPage.ets b/entry/src/main/ets/pages/testAllCacheInfoPage.ets
index 9d5003f..f39920a 100644
--- a/entry/src/main/ets/pages/testAllCacheInfoPage.ets
+++ b/entry/src/main/ets/pages/testAllCacheInfoPage.ets
@@ -12,14 +12,14 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-import {RequestOption} from '@ohos/imageknife'
-import {ImageKnifeData} from '@ohos/imageknife'
-import {AllCacheInfo,IAllCacheInfoCallback} from '@ohos/imageknife'
-import {ImageKnifeComponent} from '@ohos/imageknife'
-import {TransformType} from '@ohos/imageknife'
-import {ImageKnifeOption} from '@ohos/imageknife'
-import {ImageKnifeGlobal} from '@ohos/imageknife'
-import {RotateImageTransformation} from '@ohos/imageknife'
+import {RequestOption} from '@ohos/libraryimageknife'
+import {ImageKnifeData} from '@ohos/libraryimageknife'
+import {AllCacheInfo,IAllCacheInfoCallback} from '@ohos/libraryimageknife'
+import {ImageKnifeComponent} from '@ohos/libraryimageknife'
+import {TransformType} from '@ohos/libraryimageknife'
+import {ImageKnifeOption} from '@ohos/libraryimageknife'
+import {ImageKnifeGlobal} from '@ohos/libraryimageknife'
+import {RotateImageTransformation} from '@ohos/libraryimageknife'
import {BusinessError} from '@ohos.base'
@Entry
@Component
diff --git a/entry/src/main/ets/pages/testGifDontAnimatePage.ets b/entry/src/main/ets/pages/testGifDontAnimatePage.ets
deleted file mode 100644
index 69a56cc..0000000
--- a/entry/src/main/ets/pages/testGifDontAnimatePage.ets
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * Copyright (C) 2021 Huawei Device Co., Ltd.
- * Licensed under the Apache License, Version 2.0 (the 'License');
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an 'AS IS' BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import {ImageKnifeComponent} from '@ohos/imageknife'
-import {ImageKnifeOption} from '@ohos/imageknife'
-import {ImageKnifeGlobal} from '@ohos/imageknife'
-import {ImageKnife} from '@ohos/imageknife'
-import {RotateImageTransformation} from '@ohos/imageknife'
-import worker from '@ohos.worker'
-@Entry
-@Component
-struct TestGifDontAnimatePage {
- private globalGifWorker?:worker.ThreadWorker = undefined
- @State imageKnifeOption1: ImageKnifeOption =
- {
- loadSrc: $r('app.media.jpgSample'),
- placeholderSrc: $r('app.media.icon_loading'),
- errorholderSrc: $r('app.media.icon_failed')
- };
-
-
- build() {
- Scroll() {
- Column() {
- Flex({ direction: FlexDirection.Row }) {
- Button('本地资源gif')
- .onClick(() => {
- this.imageKnifeOption1 = {
- loadSrc: $r('app.media.gifSample'),
-
- placeholderSrc: $r('app.media.icon_loading'),
- errorholderSrc: $r('app.media.icon_failed'),
- }
-
- }).margin({left:15}).backgroundColor(Color.Grey)
- Button('本地资源gif静态')
- .onClick(()=>{
- this.imageKnifeOption1 = {
- loadSrc: $r('app.media.gifSample'),
- placeholderSrc: $r('app.media.icon_loading'),
- errorholderSrc: $r('app.media.icon_failed'),
- dontAnimateFlag:true
- }
-
- }).margin({left:15}).backgroundColor(Color.Grey)
- }
- .margin({top:15})
- Flex({direction:FlexDirection.Row}){
- Button('网络资源gif')
- .onClick(()=>{
- this.imageKnifeOption1 = {
- loadSrc: 'https://gd-hbimg.huaban.com/e0a25a7cab0d7c2431978726971d61720732728a315ae-57EskW_fw658',
- placeholderSrc: $r('app.media.icon_loading'),
- errorholderSrc: $r('app.media.icon_failed'),
- };
- }).margin({left:15}).backgroundColor(Color.Grey)
- Button('网络资源gif静态')
- .onClick(()=>{
- this.imageKnifeOption1 = {
- loadSrc: 'https://gd-hbimg.huaban.com/e0a25a7cab0d7c2431978726971d61720732728a315ae-57EskW_fw658',
- placeholderSrc: $r('app.media.icon_loading'),
- errorholderSrc: $r('app.media.icon_failed'),
- dontAnimateFlag:true
- };
- }).margin({left:15}).backgroundColor(Color.Grey)
- }
- .margin({top:15})
- ImageKnifeComponent({ imageKnifeOption: this.imageKnifeOption1 })
- .width(300)
- .height(300)
-
- }
- }
- .width('100%')
- .height('100%')
- }
-
- aboutToAppear() {
- this.globalGifWorker = new worker.ThreadWorker('entry/ets/workers/GifLoadWorker.ts', {
- type: 'classic',
- name: 'ImageKnifeParseGIF'
- })
- let imageKnife : ImageKnife|undefined = ImageKnifeGlobal.getInstance().getImageKnife()
- if(imageKnife!= undefined) {
- imageKnife.setGifWorker(this.globalGifWorker)
- }
- }
- aboutToDisappear(){
- if(this.globalGifWorker){
- this.globalGifWorker.terminate();
- }
- }
-}
-
diff --git a/entry/src/main/ets/pages/testGifLoadWithWorkerPage.ets b/entry/src/main/ets/pages/testGifLoadWithWorkerPage.ets
deleted file mode 100644
index 3f044a4..0000000
--- a/entry/src/main/ets/pages/testGifLoadWithWorkerPage.ets
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
- * Copyright (C) 2021 Huawei Device Co., Ltd.
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import { ImageKnife,ImageKnifeGlobal, ImageKnifeComponent, ImageKnifeOption } from '@ohos/imageknife'
-import worker, { MessageEvents } from '@ohos.worker'
-import Prompt from '@system.prompt'
-
-@Entry
-@Component
-struct TestGifLoadWithWorkerPage {
- @State message: string = 'Hello Worker'
- @State options: ImageKnifeOption = {
- loadSrc: $r('app.media.icon')
- }
- private my_worker?: worker.ThreadWorker = undefined;
- private my_gif_worker?: worker.ThreadWorker = undefined;
-
- /**
- * 界面进入时回调
- */
- aboutToAppear() {
- console.log("aboutToAppear")
- }
-
- /**
- * 界面退出时回调
- */
- aboutToDisappear() {
- console.log("aboutToDisappear")
- if (this.my_worker) {
- this.my_worker.terminate()
- }
- if (this.my_gif_worker) {
- this.my_gif_worker.terminate()
- }
- }
-
- build() {
- Column() {
- Column() {
- Text(`${this.message}`)
- .fontSize("50fp")
- .fontWeight(FontWeight.Bold)
-
- Button('发worker消息')
- .margin({ top: 16 })
- .onClick(() => {
- console.log("button 发worker消息 onClick()")
-
- //脚本路径不要添加src/main
- if (!this.my_worker) {
- this.my_worker = new worker.ThreadWorker('entry/ets/workers/MyWorker.ts')
- }
- this.my_worker.onmessage = (e: MessageEvents) => {
- console.log("my_worker.onmessage:" + e.data)
-
- //接收子线程传来的消息
- //将从后台线程返回的数据通过toast的方式显示出来
- Prompt.showToast({ message: e.data })
- }
- //主线程向子线程发送消息
- this.my_worker.postMessage('好好学习')
- })
- Text("向子线程发送【好好学习】,并从其得到【天天向上】的应答")
-
- Button('加载gif')
- .margin({ top: 16 })
- .onClick(() => {
- console.log("button 加载gif onClick()")
-
- //初始化自定义worker
- if (!this.my_gif_worker) {
- this.my_gif_worker = new worker.ThreadWorker('entry/ets/workers/GifLoadWorker.ts')
- }
- this.my_gif_worker.onmessage = (e: MessageEvents) => {
- console.log("my_gif_worker.onmessage: " + e.data)
- }
- (ImageKnifeGlobal.getInstance().getImageKnife())?.setGifWorker(this.my_gif_worker)
-
- //子线程加载gif,不阻塞toast的消失
- this.options = {
- loadSrc: $r('app.media.gifSample'),
- }
-
- Prompt.showToast({ message: '加载gif中,请稍等' })
- })
- Text('worker子线程解析gif,不阻塞主线程中toast的消失,解析耗时近45s,请耐心等待')
-
- Button('加载gif')
- .margin({ top: 16 })
- .onClick(() => {
-
- let imageKnife:ImageKnife|undefined = ImageKnifeGlobal.getInstance().getImageKnife();
- if(imageKnife != undefined) {
- imageKnife.setGifWorker(undefined)
- }
- //主线程加载gif,阻塞toast的消失
- this.options = {
- loadSrc: $r("app.media.test"),
- }
-
- Prompt.showToast({ message: '加载gif中,请稍等' })
- })
- Text('主线程解析gif,会阻塞主线程中toast的消失,并可能会触发THREAD_BLOCK_6S异常,解析耗时近20s,请耐心等待')
-
- ImageKnifeComponent({ imageKnifeOption: this.options })
- .width(300)
- .height(300)
- .margin(16)
- }
- .width("100%")
- .height("100%")
- .justifyContent(FlexAlign.Center)
- }
- .width("100%")
- .height("100%")
- }
-}
\ No newline at end of file
diff --git a/entry/src/main/ets/pages/testImageKnifeOptionChangedPage.ets b/entry/src/main/ets/pages/testImageKnifeOptionChangedPage.ets
index 687a98b..e2aaf7c 100644
--- a/entry/src/main/ets/pages/testImageKnifeOptionChangedPage.ets
+++ b/entry/src/main/ets/pages/testImageKnifeOptionChangedPage.ets
@@ -12,19 +12,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-import {ImageKnifeComponent} from '@ohos/imageknife'
-import {ImageKnifeOption} from '@ohos/imageknife'
-import {ImageKnifeGlobal} from '@ohos/imageknife'
-import {ImageKnife} from '@ohos/imageknife'
-import {RotateImageTransformation} from '@ohos/imageknife'
+import {ImageKnifeComponent} from '@ohos/libraryimageknife'
+import {ImageKnifeOption} from '@ohos/libraryimageknife'
+import {ImageKnifeGlobal} from '@ohos/libraryimageknife'
+import {ImageKnife} from '@ohos/libraryimageknife'
+import {RotateImageTransformation} from '@ohos/libraryimageknife'
import ArkWorker from '@ohos.worker'
import worker from '@ohos.worker'
@Entry
@Component
struct TestImageKnifeOptionChangedPage {
- private globalGifWorker?:worker.ThreadWorker = undefined
-
@State imageKnifeOption1: ImageKnifeOption =
{
loadSrc: $r('app.media.jpgSample'),
@@ -187,20 +185,10 @@ struct TestImageKnifeOptionChangedPage {
}
aboutToAppear() {
- console.log("aboutToAppear()")
- this.globalGifWorker = new worker.ThreadWorker('entry/ets/workers/GifLoadWorker.ts', {
- type: 'classic',
- name: 'ImageKnifeParseGIF'
- })
- let imageKnife:ImageKnife|undefined = ImageKnifeGlobal.getInstance().getImageKnife()
- if(imageKnife != undefined) {
- imageKnife.setGifWorker(this.globalGifWorker)
- }
+
}
aboutToDisappear(){
- if(this.globalGifWorker){
- this.globalGifWorker.terminate();
- }
+
}
}
diff --git a/entry/src/main/ets/pages/testImageKnifeOptionChangedPage2.ets b/entry/src/main/ets/pages/testImageKnifeOptionChangedPage2.ets
index 1966689..44c646f 100644
--- a/entry/src/main/ets/pages/testImageKnifeOptionChangedPage2.ets
+++ b/entry/src/main/ets/pages/testImageKnifeOptionChangedPage2.ets
@@ -12,12 +12,12 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-import {ImageKnifeComponent} from '@ohos/imageknife'
-import {ImageKnifeOption} from '@ohos/imageknife'
-import {BaseTransform} from '@ohos/imageknife'
-import {RotateImageTransformation} from '@ohos/imageknife'
-import {GrayscaleTransformation} from '@ohos/imageknife'
-import {SketchFilterTransformation} from '@ohos/imageknife'
+import {ImageKnifeComponent} from '@ohos/libraryimageknife'
+import {ImageKnifeOption} from '@ohos/libraryimageknife'
+import {BaseTransform} from '@ohos/libraryimageknife'
+import {RotateImageTransformation} from '@ohos/libraryimageknife'
+import {GrayscaleTransformation} from '@ohos/libraryimageknife'
+import {SketchFilterTransformation} from '@ohos/libraryimageknife'
import image from '@ohos.multimedia.image'
@Entry
diff --git a/entry/src/main/ets/pages/testImageKnifeOptionChangedPage3.ets b/entry/src/main/ets/pages/testImageKnifeOptionChangedPage3.ets
index 8871b1a..7c988c2 100644
--- a/entry/src/main/ets/pages/testImageKnifeOptionChangedPage3.ets
+++ b/entry/src/main/ets/pages/testImageKnifeOptionChangedPage3.ets
@@ -12,22 +12,22 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-import {ImageKnifeComponent} from '@ohos/imageknife'
-import {ImageKnifeOption} from '@ohos/imageknife'
-import {ImageKnifeGlobal} from '@ohos/imageknife'
-import {ImageKnife} from '@ohos/imageknife'
-import {ScaleType} from '@ohos/imageknife'
-import {RotateImageTransformation} from '@ohos/imageknife'
-import {GrayscaleTransformation} from '@ohos/imageknife'
-import {SketchFilterTransformation} from '@ohos/imageknife'
+import {ImageKnifeComponent} from '@ohos/libraryimageknife'
+import {ImageKnifeOption} from '@ohos/libraryimageknife'
+import {ImageKnifeGlobal} from '@ohos/libraryimageknife'
+import {ImageKnife} from '@ohos/libraryimageknife'
+import {ScaleType} from '@ohos/libraryimageknife'
+import {RotateImageTransformation} from '@ohos/libraryimageknife'
+import {GrayscaleTransformation} from '@ohos/libraryimageknife'
+import {SketchFilterTransformation} from '@ohos/libraryimageknife'
import ArkWorker from '@ohos.worker'
import worker from '@ohos.worker'
-import {BaseTransform} from '@ohos/imageknife'
+import {BaseTransform} from '@ohos/libraryimageknife'
import image from '@ohos.multimedia.image'
@Entry
@Component
struct TestImageKnifeOptionChangedPage3 {
- private globalGifWorker?:worker.ThreadWorker = undefined
+
@State imageKnifeOption1: ImageKnifeOption =
{
loadSrc: $r('app.media.jpgSample'),
@@ -158,20 +158,9 @@ struct TestImageKnifeOptionChangedPage3 {
.height('100%')
}
aboutToAppear() {
- console.log("aboutToAppear()")
- this.globalGifWorker = new worker.ThreadWorker('entry/ets/workers/GifLoadWorker.ts', {
- type: 'classic',
- name: 'ImageKnifeParseGIF'
- })
- let imageKnife:ImageKnife|undefined = ImageKnifeGlobal.getInstance().getImageKnife()
- if(imageKnife != undefined) {
- imageKnife.setGifWorker(this.globalGifWorker)
- }
+
}
aboutToDisappear(){
- if(this.globalGifWorker){
- this.globalGifWorker.terminate();
- }
}
}
diff --git a/entry/src/main/ets/pages/testImageKnifeOptionChangedPage4.ets b/entry/src/main/ets/pages/testImageKnifeOptionChangedPage4.ets
index 5585b14..441c02b 100644
--- a/entry/src/main/ets/pages/testImageKnifeOptionChangedPage4.ets
+++ b/entry/src/main/ets/pages/testImageKnifeOptionChangedPage4.ets
@@ -24,7 +24,7 @@ import {
ScaleType,
ImageKnifeGlobal,
BaseTransform
-} from '@ohos/imageknife'
+} from '@ohos/libraryimageknife'
import ArkWorker from '@ohos.worker'
import worker from '@ohos.worker';
import image from '@ohos.multimedia.image';
@@ -32,7 +32,7 @@ import image from '@ohos.multimedia.image';
@Entry
@Component
struct TestImageKnifeOptionChangedPage4 {
- private globalGifWorker?: worker.ThreadWorker = undefined
+
@State imageKnifeOption1: ImageKnifeOption =
{
loadSrc: $r('app.media.jpgSample'),
@@ -119,18 +119,11 @@ struct TestImageKnifeOptionChangedPage4 {
}
aboutToAppear() {
- console.log("aboutToAppear()")
- this.globalGifWorker = new worker.ThreadWorker('entry/ets/workers/GifLoadWorker.ts', {
- type: 'classic',
- name: 'ImageKnifeParseGIF'
- })
- ImageKnifeGlobal.getInstance().getImageKnife()?.setGifWorker(this.globalGifWorker)
+
}
aboutToDisappear() {
- if (this.globalGifWorker) {
- this.globalGifWorker.terminate();
- }
+
}
drawMainAnimate_index: number = 0;
diff --git a/entry/src/main/ets/pages/testImageKnifeOptionChangedPage5.ets b/entry/src/main/ets/pages/testImageKnifeOptionChangedPage5.ets
index ef672cd..47c1119 100644
--- a/entry/src/main/ets/pages/testImageKnifeOptionChangedPage5.ets
+++ b/entry/src/main/ets/pages/testImageKnifeOptionChangedPage5.ets
@@ -25,12 +25,12 @@ import {
IDrawLifeCycle,
ScaleType,
ImageKnifeDrawFactory
-} from '@ohos/imageknife'
+} from '@ohos/libraryimageknife'
import worker from '@ohos.worker';
@Entry
@Component
struct TestImageKnifeOptionChangedPage5 {
- private globalGifWorker?:worker.ThreadWorker = undefined
+
@State imageKnifeOption1: ImageKnifeOption =
{
loadSrc: $r('app.media.jpgSample'),
@@ -38,7 +38,7 @@ struct TestImageKnifeOptionChangedPage5 {
errorholderSrc: $r('app.media.icon_failed'),
drawLifeCycle:this.choiceViewLifeCycle(DrawType.Oval)
};
- private mTimerId: number = 0
+
build() {
Scroll() {
@@ -96,20 +96,10 @@ struct TestImageKnifeOptionChangedPage5 {
}
aboutToAppear() {
- console.log("aboutToAppear()")
- this.globalGifWorker = new worker.ThreadWorker('entry/ets/workers/GifLoadWorker.ts', {
- type: 'classic',
- name: 'ImageKnifeParseGIF'
- })
- let imageKnife:ImageKnife|undefined = ImageKnifeGlobal.getInstance().getImageKnife()
- if(imageKnife != undefined) {
- imageKnife.setGifWorker(this.globalGifWorker)
- }
+
}
aboutToDisappear(){
- if(this.globalGifWorker){
- this.globalGifWorker.terminate();
- }
+
}
private choiceViewLifeCycle(type:DrawType): IDrawLifeCycle {
diff --git a/entry/src/main/ets/pages/testManyGifLoadWithPage.ets b/entry/src/main/ets/pages/testManyGifLoadWithPage.ets
index 89ad8a1..39d354b 100644
--- a/entry/src/main/ets/pages/testManyGifLoadWithPage.ets
+++ b/entry/src/main/ets/pages/testManyGifLoadWithPage.ets
@@ -12,7 +12,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-import { ImageKnife, ImageKnifeComponent, ImageKnifeGlobal, ImageKnifeOption } from "@ohos/imageknife"
+import { ImageKnife, ImageKnifeComponent, ImageKnifeGlobal, ImageKnifeOption } from "@ohos/libraryimageknife"
import worker from '@ohos.worker';
let gifUrl = "https://gw.alicdn.com/tfs/TB1E3H5t8Bh1e4jSZFhXXcC9VXa-198-198.gif"
@@ -29,7 +29,7 @@ let data: string[] = [
@Entry
@Component
struct TestManyGifLoadWithPage {
- private globalGifWorker?: worker.ThreadWorker = undefined
+
@State p1: PixelMap | undefined = undefined
@State p2: PixelMap | undefined = undefined
@State workerOption: ImageKnifeOption = {
@@ -91,18 +91,9 @@ struct TestManyGifLoadWithPage {
}.width('100%').height('100%').backgroundColor(0xF1F3F5)
}
aboutToAppear(){
- this.globalGifWorker = new worker.ThreadWorker('entry/ets/workers/GifLoadWorker.ts',{
- type:'classic',
- name:'ImageKnifeParseGIF'
- })
- let imageKnife:ImageKnife | undefined = ImageKnifeGlobal.getInstance().getImageKnife()
- if(imageKnife != undefined){
- imageKnife?.setGifWorker(this.globalGifWorker)
- }
+
}
aboutToDisappear(){
- if(this.globalGifWorker){
- this.globalGifWorker.terminate()
- }
+
}
}
\ No newline at end of file
diff --git a/entry/src/main/ets/pages/testManyNetImageLoadWithPage.ets b/entry/src/main/ets/pages/testManyNetImageLoadWithPage.ets
index 180e286..b917aec 100644
--- a/entry/src/main/ets/pages/testManyNetImageLoadWithPage.ets
+++ b/entry/src/main/ets/pages/testManyNetImageLoadWithPage.ets
@@ -12,7 +12,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-import { ScaleType, ImageKnifeComponent } from "@ohos/imageknife"
+import { ScaleType, ImageKnifeComponent } from "@ohos/libraryimageknife"
class CommonDataSource implements IDataSource {
private dataArray: T[] = []
diff --git a/entry/src/main/ets/pages/testPreloadPage.ets b/entry/src/main/ets/pages/testPreloadPage.ets
index 7924e71..7d0272a 100644
--- a/entry/src/main/ets/pages/testPreloadPage.ets
+++ b/entry/src/main/ets/pages/testPreloadPage.ets
@@ -12,18 +12,18 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-import {ImageKnifeComponent} from '@ohos/imageknife'
-import {ImageKnifeData} from '@ohos/imageknife'
-import {ImageKnifeOption} from '@ohos/imageknife'
-import {RequestOption} from '@ohos/imageknife'
-import {ImageKnifeGlobal } from '@ohos/imageknife'
+import {ImageKnifeComponent} from '@ohos/libraryimageknife'
+import {ImageKnifeData} from '@ohos/libraryimageknife'
+import {ImageKnifeOption} from '@ohos/libraryimageknife'
+import {RequestOption} from '@ohos/libraryimageknife'
+import {ImageKnifeGlobal } from '@ohos/libraryimageknife'
import worker from '@ohos.worker'
import { BusinessError } from '@ohos.base'
@Entry
@Component
struct TestPreloadPage {
- private globalGifWorker?:worker.ThreadWorker = undefined
+
@State imageKnifeOption1: ImageKnifeOption =
{
loadSrc: $r('app.media.jpgSample'),
@@ -617,17 +617,10 @@ struct TestPreloadPage {
}
aboutToAppear() {
- this.globalGifWorker = new worker.ThreadWorker('entry/ets/workers/GifLoadWorker.ts', {
- type: 'classic',
- name: 'ImageKnifeParseGIF'
- })
- // gif解析在子线程,请在页面构建后创建worker,注入imageknife
- ImageKnifeGlobal.getInstance().getImageKnife()?.setGifWorker(this.globalGifWorker)
+
}
aboutToDisappear(){
- if(this.globalGifWorker){
- this.globalGifWorker.terminate();
- }
+
}
}
diff --git a/entry/src/main/ets/pages/testSingleFrameGifPage.ets b/entry/src/main/ets/pages/testSingleFrameGifPage.ets
index 0e292c6..7e7a8f7 100644
--- a/entry/src/main/ets/pages/testSingleFrameGifPage.ets
+++ b/entry/src/main/ets/pages/testSingleFrameGifPage.ets
@@ -12,8 +12,8 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-import { ImageKnifeComponent } from '@ohos/imageknife'
-import { ImageKnifeOption } from '@ohos/imageknife'
+import { ImageKnifeComponent } from '@ohos/libraryimageknife'
+import { ImageKnifeOption } from '@ohos/libraryimageknife'
@Entry
@Component
diff --git a/entry/src/main/ets/pages/transformPixelMapPage.ets b/entry/src/main/ets/pages/transformPixelMapPage.ets
index 226b8ed..745d1af 100644
--- a/entry/src/main/ets/pages/transformPixelMapPage.ets
+++ b/entry/src/main/ets/pages/transformPixelMapPage.ets
@@ -12,28 +12,28 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-import { RequestOption,ImageKnifeGlobal} from '@ohos/imageknife'
-import { CropCircleTransformation } from '@ohos/imageknife'
-import { RoundedCornersTransformation } from '@ohos/imageknife'
+import { RequestOption,ImageKnifeGlobal} from '@ohos/libraryimageknife'
+import { CropCircleTransformation } from '@ohos/libraryimageknife'
+import { RoundedCornersTransformation } from '@ohos/libraryimageknife'
import {
- CropCircleWithBorderTransformation
-} from '@ohos/imageknife'
-import { RotateImageTransformation } from '@ohos/imageknife'
-import { CropSquareTransformation } from '@ohos/imageknife'
-import { CropTransformation } from '@ohos/imageknife'
-import { CropType } from '@ohos/imageknife'
-import { GrayscaleTransformation } from '@ohos/imageknife'
-import { BrightnessFilterTransformation } from '@ohos/imageknife'
-import { ContrastFilterTransformation } from '@ohos/imageknife'
-import { InvertFilterTransformation } from '@ohos/imageknife'
-import { SepiaFilterTransformation } from '@ohos/imageknife'
-import { SketchFilterTransformation } from '@ohos/imageknife'
-import { BlurTransformation } from '@ohos/imageknife'
-import { PixelationFilterTransformation } from '@ohos/imageknife'
-import { MaskTransformation } from '@ohos/imageknife'
-import { SwirlFilterTransformation } from '@ohos/imageknife'
+ rgbColor,CropCircleWithBorderTransformation
+} from '@ohos/libraryimageknife'
+import { RotateImageTransformation } from '@ohos/libraryimageknife'
+import { CropSquareTransformation } from '@ohos/libraryimageknife'
+import { CropTransformation } from '@ohos/libraryimageknife'
+import { CropType } from '@ohos/libraryimageknife'
+import { GrayscaleTransformation } from '@ohos/libraryimageknife'
+import { BrightnessFilterTransformation } from '@ohos/libraryimageknife'
+import { ContrastFilterTransformation } from '@ohos/libraryimageknife'
+import { InvertFilterTransformation } from '@ohos/libraryimageknife'
+import { SepiaFilterTransformation } from '@ohos/libraryimageknife'
+import { SketchFilterTransformation } from '@ohos/libraryimageknife'
+import { BlurTransformation } from '@ohos/libraryimageknife'
+import { PixelationFilterTransformation } from '@ohos/libraryimageknife'
+import { MaskTransformation } from '@ohos/libraryimageknife'
+import { SwirlFilterTransformation } from '@ohos/libraryimageknife'
import { BusinessError } from '@ohos.base'
-import {ImageKnifeData} from '@ohos/imageknife'
+import {ImageKnifeData} from '@ohos/libraryimageknife'
/**
* PixelMap transform 示例
*/
@@ -635,8 +635,9 @@ struct TransformPixelMapPage {
*/
circleBorderTransformation(border: number) {
let imageKnifeOption = new RequestOption();
+ let color:rgbColor = { r_color: 255, g_color: 204, b_color: 204 }
let circleTransformation = new CropCircleWithBorderTransformation(border,
- { r_color: 255, g_color: 204, b_color: 204 });
+ color);
imageKnifeOption.load(mUrl)
.addListener({callback:(err:BusinessError|string, data:ImageKnifeData) => {
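`CropCircleWithBorderTransformation` now receives its border colour as an explicitly typed `rgbColor` value, exported from the library and imported together with the transformation (see the import hunk above), instead of an anonymous object literal. A minimal sketch of constructing the transformation with the typed colour, using an arbitrary border width:

```typescript
import { CropCircleWithBorderTransformation, rgbColor } from '@ohos/libraryimageknife'

// The border colour is declared as an rgbColor value before being handed to the transformation.
const borderColor: rgbColor = { r_color: 255, g_color: 204, b_color: 204 }
const circleWithBorder = new CropCircleWithBorderTransformation(10, borderColor)
```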
diff --git a/entry/src/main/ets/workers/GifLoadWorker.ts b/entry/src/main/ets/workers/GifLoadWorker.ts
deleted file mode 100644
index 50e960f..0000000
--- a/entry/src/main/ets/workers/GifLoadWorker.ts
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright (C) 2021 Huawei Device Co., Ltd.
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import worker, { ThreadWorkerGlobalScope, MessageEvents, ErrorEvent } from '@ohos.worker';
-import { gifHandler } from '@ohos/imageknife/GifWorker'
-
-var workerPort: ThreadWorkerGlobalScope = worker.workerPort;
-
-/**
- * Defines the event handler to be called when the worker thread receives a message sent by the host thread.
- * The event handler is executed in the worker thread.
- *
- * 代码参考自@ohos/imageknife/GifWorker中gifHandler方法
- *
- * @param e message data
- */
-workerPort.onmessage = gifHandler
-
-/**
- * Defines the event handler to be called when the worker receives a message that cannot be deserialized.
- * The event handler is executed in the worker thread.
- *
- * @param e message data
- */
-workerPort.onmessageerror = function (e: MessageEvents) {
- console.log("GifLoadWorker.ts workerPort.onmessageerror: " + e.data)
-}
-
-/**
- * Defines the event handler to be called when an exception occurs during worker execution.
- * The event handler is executed in the worker thread.
- *
- * @param e error message
- */
-workerPort.onerror = function (e: ErrorEvent) {
- console.log("GifLoadWorker.ts workerPort.onerror: " + e.message)
-}
\ No newline at end of file
diff --git a/entry/src/main/ets/workers/PngLoadWorker.ts b/entry/src/main/ets/workers/upngWorkerDepend.ts
similarity index 87%
rename from entry/src/main/ets/workers/PngLoadWorker.ts
rename to entry/src/main/ets/workers/upngWorkerDepend.ts
index 5bb1b01..74f75bb 100644
--- a/entry/src/main/ets/workers/PngLoadWorker.ts
+++ b/entry/src/main/ets/workers/upngWorkerDepend.ts
@@ -14,35 +14,57 @@
* limitations under the License.
*/
import worker, { ThreadWorkerGlobalScope, MessageEvents, ErrorEvent } from '@ohos.worker';
-import { UPNG } from '@ohos/imageknife/src/main/ets/components/3rd_party/upng/UPNG'
-var workerPort: ThreadWorkerGlobalScope = worker.workerPort;
+export function resolvePngWorker (workerPort,UPNG, e: MessageEvents) {
+
+ let data = e.data;
-workerPort.onmessage = function (e: MessageEvents) {
- var data = e.data;
switch (data.type) {
+
case 'readPngImageAsync':
- var png = UPNG.decode(data.data);
+
+ let png = UPNG.decode(data.data);
+
let array = png.data;
+
let arrayData = array.buffer.slice(array.byteOffset, array.byteLength + array.byteOffset)
+
png.data = arrayData;
+
let dataObj = { type: 'readPngImageAsync', data: png, receiver: data.data }
+
workerPort.postMessage(dataObj, [png.data, data.data]);
+
break;
+
case 'writePngWithStringAsync':
+
let addInfo = data.info;
+
let pngDecode = UPNG.decode(data.data);
+
let newPng = UPNG.encodeWithString(addInfo, UPNG.toRGBA8(pngDecode), pngDecode.width, pngDecode.height, 0)
+
let dataObj2 = { type: 'writePngWithStringAsync', data: newPng, receiver: data.data }
+
workerPort.postMessage(dataObj2, [newPng, data.data]);
+
break;
+
case 'writePngAsync':
+
let pngDecode3 = UPNG.decode(data.data);
+
let newPng3 = UPNG.encode(UPNG.toRGBA8(pngDecode3), pngDecode3.width, pngDecode3.height, 0)
+
let dataObj3 = { type: 'writePngAsync', data: newPng3, receiver: data.data }
+
workerPort.postMessage(dataObj3, [newPng3, data.data]);
+
break;
+
default:
+
break
}
}
\ No newline at end of file
diff --git a/entry/src/main/ets/workers/MyWorker.ts b/entry/src/main/ets/workers/upngWorkerTestCase.ets
similarity index 60%
rename from entry/src/main/ets/workers/MyWorker.ts
rename to entry/src/main/ets/workers/upngWorkerTestCase.ets
index d4506c3..6b3e8fc 100644
--- a/entry/src/main/ets/workers/MyWorker.ts
+++ b/entry/src/main/ets/workers/upngWorkerTestCase.ets
@@ -14,20 +14,12 @@
* limitations under the License.
*/
import worker, { ThreadWorkerGlobalScope, MessageEvents, ErrorEvent } from '@ohos.worker';
+import { UPNG } from '@ohos/libraryimageknife'
+import {resolvePngWorker} from './upngWorkerDepend'
+const workerPort: ThreadWorkerGlobalScope = worker.workerPort;
-var workerPort: ThreadWorkerGlobalScope = worker.workerPort;
+workerPort.onmessage = (e: MessageEvents)=> {
-workerPort.onmessage = function (e: MessageEvents) {
- console.log("MyWorker.ts workerPort.onmessage:" + e.data)
+ resolvePngWorker(workerPort,UPNG,e)
- //子线程向主线程发送消息
- workerPort.postMessage("天天向上")
-}
-
-workerPort.onmessageerror = function (e: MessageEvents) {
- console.log("MyWorker.ts workerPort.onmessageerror")
-}
-
-workerPort.onerror = function (e: ErrorEvent) {
- console.log("MyWorker.ts workerPort.onerror")
}
\ No newline at end of file
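The old `MyWorker.ts` placeholder becomes `upngWorkerTestCase.ets`: a thin worker entry that binds `workerPort.onmessage` to the `resolvePngWorker` logic extracted into `upngWorkerDepend.ts` above, with `UPNG` now imported from the library package. The host-side sketch below shows how such a worker could be driven; the script path and the `readPngImageAsync` message shape are taken from the renamed files, everything else is an assumption.

```typescript
import worker, { MessageEvents } from '@ohos.worker';

// Create the worker from the renamed entry script (assumed relative path).
const pngWorker = new worker.ThreadWorker('entry/ets/workers/upngWorkerTestCase.ets');

pngWorker.onmessage = (e: MessageEvents) => {
  // resolvePngWorker posts back { type, data, receiver }; data holds the decoded PNG.
  console.info('png worker replied, type=' + e.data.type);
};

// Hand a raw PNG buffer to the worker; the buffer is transferred rather than copied.
function decodePng(buffer: ArrayBuffer): void {
  pngWorker.postMessage({ type: 'readPngImageAsync', data: buffer }, [buffer]);
}
```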
diff --git a/entry/src/main/resources/base/profile/main_pages.json b/entry/src/main/resources/base/profile/main_pages.json
index 8332d5c..eadd98b 100644
--- a/entry/src/main/resources/base/profile/main_pages.json
+++ b/entry/src/main/resources/base/profile/main_pages.json
@@ -11,7 +11,6 @@
"pages/pngjTestCasePage",
"pages/showErrorholderTestCasePage",
"pages/transformPixelMapPage",
- "pages/testGifDontAnimatePage",
"pages/testPreloadPage",
"pages/testImageKnifeOptionChangedPage",
"pages/testImageKnifeOptionChangedPage2",
@@ -22,7 +21,6 @@
"pages/testAllCacheInfoPage",
"pages/cropImagePage2",
"pages/svgTestCasePage",
- "pages/gifTestCasePage",
"pages/imageknifeTestCaseIndex",
"pages/dataShareUriLoadPage",
"pages/manyPhotoShowPage",
@@ -30,9 +28,9 @@
"pages/tempUrlTestPage",
"pages/drawFactoryTestPage",
"pages/testSingleFrameGifPage",
- "pages/testGifLoadWithWorkerPage",
"pages/OptionTestPage",
"pages/SignatureTestPage",
+ "pages/hspCacheTestPage",
"pages/testManyNetImageLoadWithPage",
"pages/testManyGifLoadWithPage"
]
diff --git a/entry/src/ohosTest/ets/test/imageknife.test.ets b/entry/src/ohosTest/ets/test/imageknife.test.ets
index 368a7a6..2186d29 100644
--- a/entry/src/ohosTest/ets/test/imageknife.test.ets
+++ b/entry/src/ohosTest/ets/test/imageknife.test.ets
@@ -14,7 +14,7 @@
*/
import hilog from '@ohos.hilog';
import { describe, beforeAll, beforeEach, afterEach, afterAll, it, expect } from '@ohos/hypium'
-import {ImageKnife,ImageKnifeDrawFactory,ImageKnifeGlobal} from '@ohos/imageknife'
+import {ImageKnife,ImageKnifeDrawFactory,ImageKnifeGlobal} from '@ohos/libraryimageknife'
export default function ImageKnifeTest() {
describe('ImageKnifeTest', ()=> {
diff --git a/entry/src/ohosTest/ets/test/logutil.test.ets b/entry/src/ohosTest/ets/test/logutil.test.ets
index 71f61a1..4ab90ac 100644
--- a/entry/src/ohosTest/ets/test/logutil.test.ets
+++ b/entry/src/ohosTest/ets/test/logutil.test.ets
@@ -14,7 +14,7 @@
*/
import hilog from '@ohos.hilog';
import { describe, beforeAll, beforeEach, afterEach, afterAll, it, expect } from '@ohos/hypium'
-import {LogUtil} from '@ohos/imageknife'
+import {LogUtil} from '@ohos/libraryimageknife'
export default function LogUtilTest() {
describe('LogUtilTest', ()=> {
diff --git a/entry/src/ohosTest/ets/test/lrucache.test.ets b/entry/src/ohosTest/ets/test/lrucache.test.ets
index 3238954..b5ef887 100644
--- a/entry/src/ohosTest/ets/test/lrucache.test.ets
+++ b/entry/src/ohosTest/ets/test/lrucache.test.ets
@@ -14,7 +14,7 @@
*/
import hilog from '@ohos.hilog';
import { describe, beforeAll, beforeEach, afterEach, afterAll, it, expect } from '@ohos/hypium'
-import {LruCache} from '@ohos/imageknife' // DiskLruCache用例由DiskLruCache三方库提供
+import {LruCache} from '@ohos/libraryimageknife' // DiskLruCache用例由DiskLruCache三方库提供
export default function lruCacheTest() {
describe('lruCacheTest', ()=> {
diff --git a/entry/src/ohosTest/ets/test/requestoption.test.ets b/entry/src/ohosTest/ets/test/requestoption.test.ets
index e2b0621..b83bd10 100644
--- a/entry/src/ohosTest/ets/test/requestoption.test.ets
+++ b/entry/src/ohosTest/ets/test/requestoption.test.ets
@@ -15,7 +15,7 @@
*/
import hilog from '@ohos.hilog';
import { describe, beforeAll, beforeEach, afterEach, afterAll, it, expect } from '@ohos/hypium'
-import {RequestOption,Size} from '@ohos/imageknife'
+import {RequestOption,Size} from '@ohos/libraryimageknife'
export default function RequestOptionTest() {
describe('RequestOptionTest', ()=> {
diff --git a/entry/src/ohosTest/ets/test/transfrom.test.ets b/entry/src/ohosTest/ets/test/transfrom.test.ets
index 8802154..c1b1e8c 100644
--- a/entry/src/ohosTest/ets/test/transfrom.test.ets
+++ b/entry/src/ohosTest/ets/test/transfrom.test.ets
@@ -36,7 +36,7 @@ import {
ToonFilterTransform,
VignetteFilterTransform,
-} from '@ohos/imageknife'
+} from '@ohos/libraryimageknife'
export default function Transform() {
describe('Transform', ()=>{
diff --git a/entry/src/ohosTest/ets/testability/TestAbility.ets b/entry/src/ohosTest/ets/testability/TestAbility.ets
index 22b0877..140dbda 100644
--- a/entry/src/ohosTest/ets/testability/TestAbility.ets
+++ b/entry/src/ohosTest/ets/testability/TestAbility.ets
@@ -18,7 +18,7 @@ import hilog from '@ohos.hilog';
import { Hypium } from '@ohos/hypium';
import testsuite from '../test/List.test';
import window from '@ohos.window';
-import {ImageKnife,ImageKnifeDrawFactory,ImageKnifeGlobal} from '@ohos/imageknife'
+import {ImageKnife,ImageKnifeDrawFactory,ImageKnifeGlobal} from '@ohos/libraryimageknife'
import AbilityConstant from '@ohos.app.ability.AbilityConstant';
import Want from '@ohos.app.ability.Want';
import { BusinessError } from '@ohos.base'
diff --git a/hvigor/hvigor-config.json5 b/hvigor/hvigor-config.json5
index e75281b..122677b 100644
--- a/hvigor/hvigor-config.json5
+++ b/hvigor/hvigor-config.json5
@@ -1,6 +1,6 @@
{
- "hvigorVersion": "3.0.2",
+ "hvigorVersion": "3.0.9",
"dependencies": {
- "@ohos/hvigor-ohos-plugin": "3.0.2"
+ "@ohos/hvigor-ohos-plugin": "3.0.9"
}
}
\ No newline at end of file
diff --git a/imageknife/GifWorker.ts b/imageknife/GifWorker.ts
deleted file mode 100644
index f9d1a61..0000000
--- a/imageknife/GifWorker.ts
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright (C) 2022 Huawei Device Co., Ltd.
- * Licensed under the Apache License, Version 2.0 (the 'License');
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an 'AS IS' BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import worker, { ThreadWorkerGlobalScope } from '@ohos.worker';
-import { parseBufferToFrame } from './src/main/ets/components/imageknife/utils/gif/parse/GIFParse'
-
-export enum LoadType {
- loadBufferByWorker = "loadBufferByWorker"
-}
-
-var workerPort : ThreadWorkerGlobalScope = worker.workerPort;
-
-// Send or Receive Format Data Such as: {type: yourResolveType, data: yourDataJson, error?: yourErrorInfo }
-export function gifHandler(e) {
- let data = e.data;
- switch (data.type) {
- case LoadType.loadBufferByWorker:
- loadBufferByWorker(data.data, data.type);
- break;
- default:
- break
- }
-
- function loadBufferByWorker(buffer: ArrayBuffer, recType: string) {
- let images = parseBufferToFrame(buffer);
- let dimss = [];
- let delays = [];
- let disposalTypes = [];
- let patchs = [];
- let transparentIndexs = [];
- for (let i = 0; i < images.length; i++) {
- dimss.push(images[i].dims)
- delays.push(images[i].delay)
- disposalTypes.push(images[i].disposalType)
- patchs.push(images[i].patch.buffer)
- transparentIndexs.push(images[i].transparentIndex)
- }
- let frame = {
- dims: dimss,
- // 当前帧到下一帧的间隔时长
- delay: delays,
- // 当前帧绘制要求 0保留 1在上一帧绘制此帧 2恢复画布背景 3.将画布恢复到绘制当前图像之前的先前状态
- disposalType: disposalTypes,
- // Uint8CampedArray颜色转换后的补片信息用于绘制
- patch: patchs,
- // 表示透明度的可选颜色索引
- transparentIndex: transparentIndexs
- }
- let dataObj = { type: recType, data: frame }
- workerPort.postMessage(dataObj, patchs)
- }
-}
-
-
-
-
-
-
-
diff --git a/imageknife/index.ets b/imageknife/index.ets
index 1dbd5ef..1c69cec 100644
--- a/imageknife/index.ets
+++ b/imageknife/index.ets
@@ -118,8 +118,7 @@ export { SVGParseImpl } from './src/main/ets/components/imageknife/utils/svg/SVG
*/
export { GIFParseImpl } from './src/main/ets/components/imageknife/utils/gif/GIFParseImpl'
export { GIFFrame } from './src/main/ets/components/imageknife/utils/gif/GIFFrame'
-// 能力增强worker 解析GIF数据
-export { gifHandler } from './GifWorker'
+
// 自定义组件新增
// 自定义组件绘制生命周期
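With `GifWorker.ts` deleted, the package index stops re-exporting `gifHandler`, so any application-side worker script that imported `@ohos/imageknife/GifWorker` (like the demo's removed `GifLoadWorker.ts`) has to go as well. The GIF parsing types themselves stay public; a minimal before/after note, assuming consumption via the ohpm package name:

```typescript
// Gone after this change (the GifWorker module no longer exists):
//   import { gifHandler } from '@ohos/imageknife/GifWorker'

// Still exported from imageknife/index.ets for callers that parse GIF data directly:
import { GIFParseImpl, GIFFrame } from '@ohos/imageknife'
```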
diff --git a/imageknife/oh-package.json5 b/imageknife/oh-package.json5
index 25e8865..c55ced7 100644
--- a/imageknife/oh-package.json5
+++ b/imageknife/oh-package.json5
@@ -14,11 +14,12 @@
"main": "index.ets",
"repository": "https://gitee.com/openharmony-tpc/ImageKnife",
"type": "module",
- "version": "2.1.1-rc.3",
+ "version": "2.1.1-rc.4",
"dependencies": {
"@ohos/disklrucache": "^2.0.2-rc.0",
"@ohos/svg": "^2.1.1-rc.0",
- "@ohos/gpu_transform": "^1.0.0"
+ "@ohos/gpu_transform": "^1.0.0",
+ "pako": "^2.1.0"
},
"tags": [
"ImageCache",
diff --git a/imageknife/src/main/ets/components/3rd_party/jsbinaryschemaparser/lib/index.js b/imageknife/src/main/ets/components/3rd_party/jsbinaryschemaparser/lib/index.js
deleted file mode 100644
index 94dd639..0000000
--- a/imageknife/src/main/ets/components/3rd_party/jsbinaryschemaparser/lib/index.js
+++ /dev/null
@@ -1,71 +0,0 @@
-"use strict";
-
-
-
-
-var parse = function parse(stream, schema) {
- var result = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
- var parent = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : result;
-
- if (Array.isArray(schema)) {
- schema.forEach(function (partSchema) {
- return parse(stream, partSchema, result, parent);
- });
- } else if (typeof schema === 'function') {
- schema(stream, result, parent, parse);
- } else {
- var key = Object.keys(schema)[0];
-
- if (Array.isArray(schema[key])) {
- parent[key] = {};
- parse(stream, schema[key], result, parent[key]);
- } else {
- parent[key] = schema[key](stream, result, parent, parse);
- }
- }
-
- return result;
-};
-
-
-
-var conditional = function conditional(schema, conditionFunc) {
- return function (stream, result, parent, parse) {
- if (conditionFunc(stream, result, parent)) {
- parse(stream, schema, result, parent);
- }
- };
-};
-
-
-
-var loop = function loop(schema, continueFunc) {
- return function (stream, result, parent, parse) {
- var arr = [];
- var lastStreamPos = stream.pos;
-
- while (continueFunc(stream, result, parent)) {
- var newParent = {};
- parse(stream, schema, result, newParent); // cases when whole file is parsed but no termination is there and stream position is not getting updated as well
- // it falls into infinite recursion, null check to avoid the same
-
- if (stream.pos === lastStreamPos) {
- break;
- }
-
- lastStreamPos = stream.pos;
- arr.push(newParent);
- }
-
- return arr;
- };
-};
-
-
-
-export{
- loop,
- conditional,
- parse
-}
-
diff --git a/imageknife/src/main/ets/components/3rd_party/jsbinaryschemaparser/lib/parsers/uint8.js b/imageknife/src/main/ets/components/3rd_party/jsbinaryschemaparser/lib/parsers/uint8.js
deleted file mode 100644
index 82c16d7..0000000
--- a/imageknife/src/main/ets/components/3rd_party/jsbinaryschemaparser/lib/parsers/uint8.js
+++ /dev/null
@@ -1,114 +0,0 @@
-"use strict";
-
-
-// Default stream and parsers for Uint8TypedArray data type
-var buildStream = function buildStream(uint8Data) {
- return {
- data: uint8Data,
- pos: 0
- };
-};
-
-
-
-var readByte = function readByte() {
- return function (stream) {
- return stream.data[stream.pos++];
- };
-};
-
-
-
-var peekByte = function peekByte() {
- var offset = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 0;
- return function (stream) {
- return stream.data[stream.pos + offset];
- };
-};
-
-
-
-var readBytes = function readBytes(length) {
- return function (stream) {
- return stream.data.subarray(stream.pos, stream.pos += length);
- };
-};
-
-
-var peekBytes = function peekBytes(length) {
- return function (stream) {
- return stream.data.subarray(stream.pos, stream.pos + length);
- };
-};
-
-
-var readString = function readString(length) {
- return function (stream) {
- return Array.from(readBytes(length)(stream)).map(function (value) {
- return String.fromCharCode(value);
- }).join('');
- };
-};
-
-
-
-var readUnsigned = function readUnsigned(littleEndian) {
- return function (stream) {
- var bytes = readBytes(2)(stream);
- return littleEndian ? (bytes[1] << 8) + bytes[0] : (bytes[0] << 8) + bytes[1];
- };
-};
-
-
-
-var readArray = function readArray(byteSize, totalOrFunc) {
- return function (stream, result, parent) {
- var total = typeof totalOrFunc === 'function' ? totalOrFunc(stream, result, parent) : totalOrFunc;
- var parser = readBytes(byteSize);
- var arr = new Array(total);
-
- for (var i = 0; i < total; i++) {
- arr[i] = parser(stream);
- }
-
- return arr;
- };
-};
-
-
-var subBitsTotal = function subBitsTotal(bits, startIndex, length) {
- var result = 0;
-
- for (var i = 0; i < length; i++) {
- result += bits[startIndex + i] && Math.pow(2, length - i - 1);
- }
-
- return result;
-};
-
-var readBits = function readBits(schema) {
- return function (stream) {
- var _byte = readByte()(stream); // convert the byte to bit array
-
-
- var bits = new Array(8);
-
- for (var i = 0; i < 8; i++) {
- bits[7 - i] = !!(_byte & 1 << i);
- } // convert the bit array to values based on the schema
-
-
- return Object.keys(schema).reduce(function (res, key) {
- var def = schema[key];
-
- if (def.length) {
- res[key] = subBitsTotal(bits, def.index, def.length);
- } else {
- res[key] = bits[def.index];
- }
-
- return res;
- }, {});
- };
-};
-export {buildStream,readByte,peekByte,readBytes,peekBytes,readString,readUnsigned,readArray,readBits}
diff --git a/imageknife/src/main/ets/components/3rd_party/jsbinaryschemaparser/lib/schemas/gif.js b/imageknife/src/main/ets/components/3rd_party/jsbinaryschemaparser/lib/schemas/gif.js
deleted file mode 100644
index 1e4c6e8..0000000
--- a/imageknife/src/main/ets/components/3rd_party/jsbinaryschemaparser/lib/schemas/gif.js
+++ /dev/null
@@ -1,220 +0,0 @@
-"use strict";
-
-
-import {
- loop,
- conditional,
- parse
-} from "../";
-
-import {buildStream,readByte,peekByte,readBytes,peekBytes,readString,readUnsigned,readArray,readBits} from "../parsers/uint8";
-
-// a set of 0x00 terminated subblocks
-var subBlocksSchema = {
- blocks: function blocks(stream) {
- var terminator = 0x00;
- var chunks = [];
- var streamSize = stream.data.length;
- var total = 0;
-
- for (var size = (0, readByte)()(stream); size !== terminator; size = (0, readByte)()(stream)) {
- // size becomes undefined for some case when file is corrupted and terminator is not proper
- // null check to avoid recursion
- if (!size) break; // catch corrupted files with no terminator
-
- if (stream.pos + size >= streamSize) {
- var availableSize = streamSize - stream.pos;
- chunks.push((0, readBytes)(availableSize)(stream));
- total += availableSize;
- break;
- }
-
- chunks.push((0, readBytes)(size)(stream));
- total += size;
- }
-
- var result = new Uint8Array(total);
- var offset = 0;
-
- for (var i = 0; i < chunks.length; i++) {
- result.set(chunks[i], offset);
- offset += chunks[i].length;
- }
-
- return result;
- }
-}; // global control extension
-
-var gceSchema = (0, conditional)({
- gce: [{
- codes: (0, readBytes)(2)
- }, {
- byteSize: (0, readByte)()
- }, {
- extras: (0, readBits)({
- future: {
- index: 0,
- length: 3
- },
- disposal: {
- index: 3,
- length: 3
- },
- userInput: {
- index: 6
- },
- transparentColorGiven: {
- index: 7
- }
- })
- }, {
- delay: (0, readUnsigned)(true)
- }, {
- transparentColorIndex: (0, readByte)()
- }, {
- terminator: (0, readByte)()
- }]
-}, function (stream) {
- var codes = (0, peekBytes)(2)(stream);
- return codes[0] === 0x21 && codes[1] === 0xf9;
-}); // image pipeline block
-
-var imageSchema = (0, conditional)({
- image: [{
- code: (0, readByte)()
- }, {
- descriptor: [{
- left: (0, readUnsigned)(true)
- }, {
- top: (0, readUnsigned)(true)
- }, {
- width: (0, readUnsigned)(true)
- }, {
- height: (0, readUnsigned)(true)
- }, {
- lct: (0, readBits)({
- exists: {
- index: 0
- },
- interlaced: {
- index: 1
- },
- sort: {
- index: 2
- },
- future: {
- index: 3,
- length: 2
- },
- size: {
- index: 5,
- length: 3
- }
- })
- }]
- }, (0, conditional)({
- lct: (0, readArray)(3, function (stream, result, parent) {
- return Math.pow(2, parent.descriptor.lct.size + 1);
- })
- }, function (stream, result, parent) {
- return parent.descriptor.lct.exists;
- }), {
- data: [{
- minCodeSize: (0, readByte)()
- }, subBlocksSchema]
- }]
-}, function (stream) {
- return (0, peekByte)()(stream) === 0x2c;
-}); // plain text block
-
-var textSchema = (0, conditional)({
- text: [{
- codes: (0, readBytes)(2)
- }, {
- blockSize: (0, readByte)()
- }, {
- preData: function preData(stream, result, parent) {
- return (0, readBytes)(parent.text.blockSize)(stream);
- }
- }, subBlocksSchema]
-}, function (stream) {
- var codes = (0, peekBytes)(2)(stream);
- return codes[0] === 0x21 && codes[1] === 0x01;
-}); // application block
-
-var applicationSchema = (0, conditional)({
- application: [{
- codes: (0, readBytes)(2)
- }, {
- blockSize: (0, readByte)()
- }, {
- id: function id(stream, result, parent) {
- return (0, readString)(parent.blockSize)(stream);
- }
- }, subBlocksSchema]
-}, function (stream) {
- var codes = (0, peekBytes)(2)(stream);
- return codes[0] === 0x21 && codes[1] === 0xff;
-}); // comment block
-
-var commentSchema = (0, conditional)({
- comment: [{
- codes: (0, readBytes)(2)
- }, subBlocksSchema]
-}, function (stream) {
- var codes = (0, peekBytes)(2)(stream);
- return codes[0] === 0x21 && codes[1] === 0xfe;
-});
-var schema = [{
- header: [{
- signature: (0, readString)(3)
- }, {
- version: (0, readString)(3)
- }]
-}, {
- lsd: [{
- width: (0, readUnsigned)(true)
- }, {
- height: (0, readUnsigned)(true)
- }, {
- gct: (0, readBits)({
- exists: {
- index: 0
- },
- resolution: {
- index: 1,
- length: 3
- },
- sort: {
- index: 4
- },
- size: {
- index: 5,
- length: 3
- }
- })
- }, {
- backgroundColorIndex: (0, readByte)()
- }, {
- pixelAspectRatio: (0, readByte)()
- }]
-}, (0, conditional)({
- gct: (0, readArray)(3, function (stream, result) {
- return Math.pow(2, result.lsd.gct.size + 1);
- })
-}, function (stream, result) {
- return result.lsd.gct.exists;
-}), // content frames
-{
- frames: (0, loop)([gceSchema, applicationSchema, commentSchema, imageSchema, textSchema], function (stream) {
- var nextCode = (0, peekByte)()(stream); // rather than check for a terminator, we should check for the existence
- // of an ext or image block to avoid infinite loops
- //var terminator = 0x3B;
- //return nextCode !== terminator;
-
- return nextCode === 0x21 || nextCode === 0x2c;
- })
-}];
-var _default = schema;
-
-export {_default}
\ No newline at end of file
diff --git a/imageknife/src/main/ets/components/3rd_party/pako/lib/deflate.js b/imageknife/src/main/ets/components/3rd_party/pako/lib/deflate.js
deleted file mode 100644
index 4dd94e9..0000000
--- a/imageknife/src/main/ets/components/3rd_party/pako/lib/deflate.js
+++ /dev/null
@@ -1,383 +0,0 @@
-'use strict';
-
-
-
-import {deflateInit2,deflateSetHeader,deflateEnd,deflate as deflate1,deflateSetDictionary} from './zlib/deflate';
-import {assign,flattenChunks} from './utils/common';
-import { string2buf } from './utils/strings';
-import msg from './zlib/messages';
-import {ZStream} from './zlib/zstream';
-
-const toString = Object.prototype.toString;
-
-/* Public constants ==========================================================*/
-/* ===========================================================================*/
-
-import constants1 from './zlib/constants';
-var Z_NO_FLUSH = constants1.Z_NO_FLUSH;
-var Z_SYNC_FLUSH = constants1.Z_SYNC_FLUSH;
-var Z_FULL_FLUSH = constants1.Z_FULL_FLUSH;
-var Z_FINISH = constants1.Z_FINISH;
-var Z_OK = constants1.Z_OK;
-var Z_STREAM_END = constants1.Z_STREAM_END;
-var Z_DEFAULT_COMPRESSION = constants1.Z_DEFAULT_COMPRESSION;
-var Z_DEFAULT_STRATEGY = constants1.Z_DEFAULT_STRATEGY;
-var Z_DEFLATED = constants1.Z_DEFLATED;
-
-/* ===========================================================================*/
-
-
-/**
- * class Deflate
- *
- * Generic JS-style wrapper for zlib calls. If you don't need
- * streaming behaviour - use more simple functions: [[deflate]],
- * [[deflateRaw]] and [[gzip]].
- **/
-
-/* internal
- * Deflate.chunks -> Array
- *
- * Chunks of output data, if [[Deflate#onData]] not overridden.
- **/
-
-/**
- * Deflate.result -> Uint8Array
- *
- * Compressed result, generated by default [[Deflate#onData]]
- * and [[Deflate#onEnd]] handlers. Filled after you push last chunk
- * (call [[Deflate#push]] with `Z_FINISH` / `true` param).
- **/
-
-/**
- * Deflate.err -> Number
- *
- * Error code after deflate finished. 0 (Z_OK) on success.
- * You will not need it in real life, because deflate errors
- * are possible only on wrong options or bad `onData` / `onEnd`
- * custom handlers.
- **/
-
-/**
- * Deflate.msg -> String
- *
- * Error message, if [[Deflate.err]] != 0
- **/
-
-
-/**
- * new Deflate(options)
- * - options (Object): zlib deflate options.
- *
- * Creates new deflator instance with specified params. Throws exception
- * on bad params. Supported options:
- *
- * - `level`
- * - `windowBits`
- * - `memLevel`
- * - `strategy`
- * - `dictionary`
- *
- * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced)
- * for more information on these.
- *
- * Additional options, for internal needs:
- *
- * - `chunkSize` - size of generated data chunks (16K by default)
- * - `raw` (Boolean) - do raw deflate
- * - `gzip` (Boolean) - create gzip wrapper
- * - `header` (Object) - custom header for gzip
- * - `text` (Boolean) - true if compressed data believed to be text
- * - `time` (Number) - modification time, unix timestamp
- * - `os` (Number) - operation system code
- * - `extra` (Array) - array of bytes with extra data (max 65536)
- * - `name` (String) - file name (binary string)
- * - `comment` (String) - comment (binary string)
- * - `hcrc` (Boolean) - true if header crc should be added
- *
- * ##### Example:
- *
- * ```javascript
- * const pako = require('pako')
- * , chunk1 = new Uint8Array([1,2,3,4,5,6,7,8,9])
- * , chunk2 = new Uint8Array([10,11,12,13,14,15,16,17,18,19]);
- *
- * const deflate = new pako.Deflate({ level: 3});
- *
- * deflate.push(chunk1, false);
- * deflate.push(chunk2, true); // true -> last chunk
- *
- * if (deflate.err) { throw new Error(deflate.err); }
- *
- * console.log(deflate.result);
- * ```
- **/
-function Deflate(options) {
- this.options = assign({
- level: Z_DEFAULT_COMPRESSION,
- method: Z_DEFLATED,
- chunkSize: 16384,
- windowBits: 15,
- memLevel: 8,
- strategy: Z_DEFAULT_STRATEGY
- }, options || {});
-
- let opt = this.options;
-
- if (opt.raw && (opt.windowBits > 0)) {
- opt.windowBits = -opt.windowBits;
- }
-
- else if (opt.gzip && (opt.windowBits > 0) && (opt.windowBits < 16)) {
- opt.windowBits += 16;
- }
-
- this.err = 0; // error code, if happens (0 = Z_OK)
- this.msg = ''; // error message
- this.ended = false; // used to avoid multiple onEnd() calls
- this.chunks = []; // chunks of compressed data
-
- this.strm = new ZStream();
- this.strm.avail_out = 0;
-
- let status = deflateInit2(
- this.strm,
- opt.level,
- opt.method,
- opt.windowBits,
- opt.memLevel,
- opt.strategy
- );
-
- if (status !== Z_OK) {
- throw new Error(msg[status]);
- }
-
- if (opt.header) {
- deflateSetHeader(this.strm, opt.header);
- }
-
- if (opt.dictionary) {
- let dict;
- // Convert data if needed
- if (typeof opt.dictionary === 'string') {
- // If we need to compress text, change encoding to utf8.
- dict = string2buf(opt.dictionary);
- } else if (toString.call(opt.dictionary) === '[object ArrayBuffer]') {
- dict = new Uint8Array(opt.dictionary);
- } else {
- dict = opt.dictionary;
- }
-
- status = deflateSetDictionary(this.strm, dict);
-
- if (status !== Z_OK) {
- throw new Error(msg[status]);
- }
-
- this._dict_set = true;
- }
-}
-
-/**
- * Deflate#push(data[, flush_mode]) -> Boolean
- * - data (Uint8Array|ArrayBuffer|String): input data. Strings will be
- * converted to utf8 byte sequence.
- * - flush_mode (Number|Boolean): 0..6 for corresponding Z_NO_FLUSH..Z_TREE modes.
- * See constants. Skipped or `false` means Z_NO_FLUSH, `true` means Z_FINISH.
- *
- * Sends input data to deflate pipe, generating [[Deflate#onData]] calls with
- * new compressed chunks. Returns `true` on success. The last data block must
- * have `flush_mode` Z_FINISH (or `true`). That will flush internal pending
- * buffers and call [[Deflate#onEnd]].
- *
- * On fail call [[Deflate#onEnd]] with error code and return false.
- *
- * ##### Example
- *
- * ```javascript
- * push(chunk, false); // push one of data chunks
- * ...
- * push(chunk, true); // push last chunk
- * ```
- **/
-Deflate.prototype.push = function (data, flush_mode) {
- const strm = this.strm;
- const chunkSize = this.options.chunkSize;
- let status, _flush_mode;
-
- if (this.ended) { return false; }
-
- if (flush_mode === ~~flush_mode) _flush_mode = flush_mode;
- else _flush_mode = flush_mode === true ? Z_FINISH : Z_NO_FLUSH;
-
- // Convert data if needed
- if (typeof data === 'string') {
- // If we need to compress text, change encoding to utf8.
- strm.input = string2buf(data);
- } else if (toString.call(data) === '[object ArrayBuffer]') {
- strm.input = new Uint8Array(data);
- } else {
- strm.input = data;
- }
-
- strm.next_in = 0;
- strm.avail_in = strm.input.length;
-
- for (;;) {
- if (strm.avail_out === 0) {
- strm.output = new Uint8Array(chunkSize);
- strm.next_out = 0;
- strm.avail_out = chunkSize;
- }
-
- // Make sure avail_out > 6 to avoid repeating markers
- if ((_flush_mode === Z_SYNC_FLUSH || _flush_mode === Z_FULL_FLUSH) && strm.avail_out <= 6) {
- this.onData(strm.output.subarray(0, strm.next_out));
- strm.avail_out = 0;
- continue;
- }
-
- status = deflate1(strm, _flush_mode);
-
- // Ended => flush and finish
- if (status === Z_STREAM_END) {
- if (strm.next_out > 0) {
- this.onData(strm.output.subarray(0, strm.next_out));
- }
- status = deflateEnd(this.strm);
- this.onEnd(status);
- this.ended = true;
- return status === Z_OK;
- }
-
- // Flush if out buffer full
- if (strm.avail_out === 0) {
- this.onData(strm.output);
- continue;
- }
-
- // Flush if requested and has data
- if (_flush_mode > 0 && strm.next_out > 0) {
- this.onData(strm.output.subarray(0, strm.next_out));
- strm.avail_out = 0;
- continue;
- }
-
- if (strm.avail_in === 0) break;
- }
-
- return true;
-};
-
-
-/**
- * Deflate#onData(chunk) -> Void
- * - chunk (Uint8Array): output data.
- *
- * By default, stores data blocks in `chunks[]` property and glue
- * those in `onEnd`. Override this handler, if you need another behaviour.
- **/
-Deflate.prototype.onData = function (chunk) {
- this.chunks.push(chunk);
-};
-
-
-/**
- * Deflate#onEnd(status) -> Void
- * - status (Number): deflate status. 0 (Z_OK) on success,
- * other if not.
- *
- * Called once after you tell deflate that the input stream is
- * complete (Z_FINISH). By default - join collected chunks,
- * free memory and fill `results` / `err` properties.
- **/
-Deflate.prototype.onEnd = function (status) {
- // On success - join
- if (status === Z_OK) {
- this.result = flattenChunks(this.chunks);
- }
- this.chunks = [];
- this.err = status;
- this.msg = this.strm.msg;
-};
-
-
-/**
- * deflate(data[, options]) -> Uint8Array
- * - data (Uint8Array|ArrayBuffer|String): input data to compress.
- * - options (Object): zlib deflate options.
- *
- * Compress `data` with deflate algorithm and `options`.
- *
- * Supported options are:
- *
- * - level
- * - windowBits
- * - memLevel
- * - strategy
- * - dictionary
- *
- * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced)
- * for more information on these.
- *
- * Sugar (options):
- *
- * - `raw` (Boolean) - say that we work with raw stream, if you don't wish to specify
- * negative windowBits implicitly.
- *
- * ##### Example:
- *
- * ```javascript
- * const pako = require('pako')
- * const data = new Uint8Array([1,2,3,4,5,6,7,8,9]);
- *
- * console.log(pako.deflate(data));
- * ```
- **/
-function deflate(input, options) {
- const deflator = new Deflate(options);
-
- deflator.push(input, true);
-
- // That will never happens, if you don't cheat with options :)
- if (deflator.err) { throw deflator.msg || msg[deflator.err]; }
-
- return deflator.result;
-}
-
-
-/**
- * deflateRaw(data[, options]) -> Uint8Array
- * - data (Uint8Array|ArrayBuffer|String): input data to compress.
- * - options (Object): zlib deflate options.
- *
- * The same as [[deflate]], but creates raw data, without wrapper
- * (header and adler32 crc).
- **/
-function deflateRaw(input, options) {
- options = options || {};
- options.raw = true;
- return deflate(input, options);
-}
-
-
-/**
- * gzip(data[, options]) -> Uint8Array
- * - data (Uint8Array|ArrayBuffer|String): input data to compress.
- * - options (Object): zlib deflate options.
- *
- * The same as [[deflate]], but create gzip wrapper instead of
- * deflate one.
- **/
-function gzip(input, options) {
- options = options || {};
- options.gzip = true;
- return deflate(input, options);
-}
-
-import constants from './zlib/constants'
-
-export {Deflate,deflate,deflateRaw,gzip,constants}
-
-
diff --git a/imageknife/src/main/ets/components/3rd_party/pako/lib/inflate.js b/imageknife/src/main/ets/components/3rd_party/pako/lib/inflate.js
deleted file mode 100644
index 3266896..0000000
--- a/imageknife/src/main/ets/components/3rd_party/pako/lib/inflate.js
+++ /dev/null
@@ -1,421 +0,0 @@
-'use strict';
-
-import {inflateReset,inflateInit2,inflate as inflate1,inflateEnd,inflateGetHeader,inflateSetDictionary,inflateInfo } from './zlib/inflate'
-
-import {assign,flattenChunks} from './utils/common';
-import {string2buf,buf2string,utf8border} from './utils/strings';
-import msg from './zlib/messages';
-import {ZStream} from './zlib/zstream';
-import {GZheader} from './zlib/gzheader';
-
-const toString = Object.prototype.toString;
-
-/* Public constants ==========================================================*/
-/* ===========================================================================*/
-
-import constants from './zlib/constants';
-var Z_NO_FLUSH = constants.Z_NO_FLUSH;
-var Z_FINISH = constants.Z_FINISH;
-var Z_OK = constants.Z_OK;
-var Z_STREAM_END = constants.Z_STREAM_END;
-var Z_NEED_DICT = constants.Z_NEED_DICT;
-var Z_STREAM_ERROR = constants.Z_STREAM_ERROR;
-var Z_DATA_ERROR = constants.Z_DATA_ERROR;
-var Z_MEM_ERROR = constants.Z_MEM_ERROR;
-
-/* ===========================================================================*/
-
-
-/**
- * class Inflate
- *
- * Generic JS-style wrapper for zlib calls. If you don't need
- * streaming behaviour - use more simple functions: [[inflate]]
- * and [[inflateRaw]].
- **/
-
-/* internal
- * inflate.chunks -> Array
- *
- * Chunks of output data, if [[Inflate#onData]] not overridden.
- **/
-
-/**
- * Inflate.result -> Uint8Array|String
- *
- * Uncompressed result, generated by default [[Inflate#onData]]
- * and [[Inflate#onEnd]] handlers. Filled after you push last chunk
- * (call [[Inflate#push]] with `Z_FINISH` / `true` param).
- **/
-
-/**
- * Inflate.err -> Number
- *
- * Error code after inflate finished. 0 (Z_OK) on success.
- * Should be checked if broken data possible.
- **/
-
-/**
- * Inflate.msg -> String
- *
- * Error message, if [[Inflate.err]] != 0
- **/
-
-
-/**
- * new Inflate(options)
- * - options (Object): zlib inflate options.
- *
- * Creates new inflator instance with specified params. Throws exception
- * on bad params. Supported options:
- *
- * - `windowBits`
- * - `dictionary`
- *
- * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced)
- * for more information on these.
- *
- * Additional options, for internal needs:
- *
- * - `chunkSize` - size of generated data chunks (16K by default)
- * - `raw` (Boolean) - do raw inflate
- * - `to` (String) - if equal to 'string', then result will be converted
- * from utf8 to utf16 (javascript) string. When string output requested,
- * chunk length can differ from `chunkSize`, depending on content.
- *
- * By default, when no options set, autodetect deflate/gzip data format via
- * wrapper header.
- *
- * ##### Example:
- *
- * ```javascript
- * const pako = require('pako')
- * const chunk1 = new Uint8Array([1,2,3,4,5,6,7,8,9])
- * const chunk2 = new Uint8Array([10,11,12,13,14,15,16,17,18,19]);
- *
- * const inflate = new pako.Inflate({ level: 3});
- *
- * inflate.push(chunk1, false);
- * inflate.push(chunk2, true); // true -> last chunk
- *
- * if (inflate.err) { throw new Error(inflate.err); }
- *
- * console.log(inflate.result);
- * ```
- **/
-function Inflate(options) {
- this.options = utils.assign({
- chunkSize: 1024 * 64,
- windowBits: 15,
- to: ''
- }, options || {});
-
- const opt = this.options;
-
- // Force window size for `raw` data, if not set directly,
- // because we have no header for autodetect.
- if (opt.raw && (opt.windowBits >= 0) && (opt.windowBits < 16)) {
- opt.windowBits = -opt.windowBits;
- if (opt.windowBits === 0) { opt.windowBits = -15; }
- }
-
- // If `windowBits` not defined (and mode not raw) - set autodetect flag for gzip/deflate
- if ((opt.windowBits >= 0) && (opt.windowBits < 16) &&
- !(options && options.windowBits)) {
- opt.windowBits += 32;
- }
-
- // Gzip header has no info about windows size, we can do autodetect only
- // for deflate. So, if window size not set, force it to max when gzip possible
- if ((opt.windowBits > 15) && (opt.windowBits < 48)) {
- // bit 3 (16) -> gzipped data
- // bit 4 (32) -> autodetect gzip/deflate
- if ((opt.windowBits & 15) === 0) {
- opt.windowBits |= 15;
- }
- }
-
- this.err = 0; // error code, if happens (0 = Z_OK)
- this.msg = ''; // error message
- this.ended = false; // used to avoid multiple onEnd() calls
- this.chunks = []; // chunks of compressed data
-
- this.strm = new ZStream();
- this.strm.avail_out = 0;
-
- let status = inflateInit2(
- this.strm,
- opt.windowBits
- );
-
- if (status !== Z_OK) {
- throw new Error(msg[status]);
- }
-
- this.header = new GZheader();
-
- inflateGetHeader(this.strm, this.header);
-
- // Setup dictionary
- if (opt.dictionary) {
- // Convert data if needed
- if (typeof opt.dictionary === 'string') {
- opt.dictionary = strings.string2buf(opt.dictionary);
- } else if (toString.call(opt.dictionary) === '[object ArrayBuffer]') {
- opt.dictionary = new Uint8Array(opt.dictionary);
- }
- if (opt.raw) { //In raw mode we need to set the dictionary early
- status = inflateSetDictionary(this.strm, opt.dictionary);
- if (status !== Z_OK) {
- throw new Error(msg[status]);
- }
- }
- }
-}
-
-/**
- * Inflate#push(data[, flush_mode]) -> Boolean
- * - data (Uint8Array|ArrayBuffer): input data
- * - flush_mode (Number|Boolean): 0..6 for corresponding Z_NO_FLUSH..Z_TREE
- * flush modes. See constants. Skipped or `false` means Z_NO_FLUSH,
- * `true` means Z_FINISH.
- *
- * Sends input data to inflate pipe, generating [[Inflate#onData]] calls with
- * new output chunks. Returns `true` on success. If end of stream detected,
- * [[Inflate#onEnd]] will be called.
- *
- * `flush_mode` is not needed for normal operation, because end of stream
- * detected automatically. You may try to use it for advanced things, but
- * this functionality was not tested.
- *
- * On fail call [[Inflate#onEnd]] with error code and return false.
- *
- * ##### Example
- *
- * ```javascript
- * push(chunk, false); // push one of data chunks
- * ...
- * push(chunk, true); // push last chunk
- * ```
- **/
-Inflate.prototype.push = function (data, flush_mode) {
- const strm = this.strm;
- const chunkSize = this.options.chunkSize;
- const dictionary = this.options.dictionary;
- let status, _flush_mode, last_avail_out;
-
- if (this.ended) return false;
-
- if (flush_mode === ~~flush_mode) _flush_mode = flush_mode;
- else _flush_mode = flush_mode === true ? Z_FINISH : Z_NO_FLUSH;
-
- // Convert data if needed
- if (toString.call(data) === '[object ArrayBuffer]') {
- strm.input = new Uint8Array(data);
- } else {
- strm.input = data;
- }
-
- strm.next_in = 0;
- strm.avail_in = strm.input.length;
-
- for (;;) {
- if (strm.avail_out === 0) {
- strm.output = new Uint8Array(chunkSize);
- strm.next_out = 0;
- strm.avail_out = chunkSize;
- }
-
- status = inflate1(strm, _flush_mode);
-
- if (status === Z_NEED_DICT && dictionary) {
- status = inflateSetDictionary(strm, dictionary);
-
- if (status === Z_OK) {
- status = inflate1(strm, _flush_mode);
- } else if (status === Z_DATA_ERROR) {
- // Replace code with more verbose
- status = Z_NEED_DICT;
- }
- }
-
- // Skip snyc markers if more data follows and not raw mode
- while (strm.avail_in > 0 &&
- status === Z_STREAM_END &&
- strm.state.wrap > 0 &&
- data[strm.next_in] !== 0)
- {
- inflateReset(strm);
- status = inflate1(strm, _flush_mode);
- }
-
- switch (status) {
- case Z_STREAM_ERROR:
- case Z_DATA_ERROR:
- case Z_NEED_DICT:
- case Z_MEM_ERROR:
- this.onEnd(status);
- this.ended = true;
- return false;
- }
-
- // Remember real `avail_out` value, because we may patch out buffer content
- // to align utf8 strings boundaries.
- last_avail_out = strm.avail_out;
-
- if (strm.next_out) {
- if (strm.avail_out === 0 || status === Z_STREAM_END) {
-
- if (this.options.to === 'string') {
-
- let next_out_utf8 = strings.utf8border(strm.output, strm.next_out);
-
- let tail = strm.next_out - next_out_utf8;
- let utf8str = strings.buf2string(strm.output, next_out_utf8);
-
- // move tail & realign counters
- strm.next_out = tail;
- strm.avail_out = chunkSize - tail;
- if (tail) strm.output.set(strm.output.subarray(next_out_utf8, next_out_utf8 + tail), 0);
-
- this.onData(utf8str);
-
- } else {
- this.onData(strm.output.length === strm.next_out ? strm.output : strm.output.subarray(0, strm.next_out));
- }
- }
- }
-
- // Must repeat iteration if out buffer is full
- if (status === Z_OK && last_avail_out === 0) continue;
-
- // Finalize if end of stream reached.
- if (status === Z_STREAM_END) {
- status = inflateEnd(this.strm);
- this.onEnd(status);
- this.ended = true;
- return true;
- }
-
- if (strm.avail_in === 0) break;
- }
-
- return true;
-};
-
-
-/**
- * Inflate#onData(chunk) -> Void
- * - chunk (Uint8Array|String): output data. When string output requested,
- * each chunk will be string.
- *
- * By default, stores data blocks in `chunks[]` property and glue
- * those in `onEnd`. Override this handler, if you need another behaviour.
- **/
-Inflate.prototype.onData = function (chunk) {
- this.chunks.push(chunk);
-};
-
-
-/**
- * Inflate#onEnd(status) -> Void
- * - status (Number): inflate status. 0 (Z_OK) on success,
- * other if not.
- *
- * Called either after you tell inflate that the input stream is
- * complete (Z_FINISH). By default - join collected chunks,
- * free memory and fill `results` / `err` properties.
- **/
-Inflate.prototype.onEnd = function (status) {
- // On success - join
- if (status === Z_OK) {
- if (this.options.to === 'string') {
- this.result = this.chunks.join('');
- } else {
- this.result = utils.flattenChunks(this.chunks);
- }
- }
- this.chunks = [];
- this.err = status;
- this.msg = this.strm.msg;
-};
-
-
-/**
- * inflate(data[, options]) -> Uint8Array|String
- * - data (Uint8Array|ArrayBuffer): input data to decompress.
- * - options (Object): zlib inflate options.
- *
- * Decompress `data` with inflate/ungzip and `options`. Autodetect
- * format via wrapper header by default. That's why we don't provide
- * separate `ungzip` method.
- *
- * Supported options are:
- *
- * - windowBits
- *
- * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced)
- * for more information.
- *
- * Sugar (options):
- *
- * - `raw` (Boolean) - say that we work with raw stream, if you don't wish to specify
- * negative windowBits implicitly.
- * - `to` (String) - if equal to 'string', then result will be converted
- * from utf8 to utf16 (javascript) string. When string output requested,
- * chunk length can differ from `chunkSize`, depending on content.
- *
- *
- * ##### Example:
- *
- * ```javascript
- * const pako = require('pako');
- * const input = pako.deflate(new Uint8Array([1,2,3,4,5,6,7,8,9]));
- * let output;
- *
- * try {
- * output = pako.inflate(input);
- * } catch (err) {
- * console.log(err);
- * }
- * ```
- **/
-function inflate(input, options) {
- const inflator = new Inflate(options);
-
- inflator.push(input);
-
- // That will never happens, if you don't cheat with options :)
- if (inflator.err) throw inflator.msg || msg[inflator.err];
-
- return inflator.result;
-}
-
-
-/**
- * inflateRaw(data[, options]) -> Uint8Array|String
- * - data (Uint8Array|ArrayBuffer): input data to decompress.
- * - options (Object): zlib inflate options.
- *
- * The same as [[inflate]], but creates raw data, without wrapper
- * (header and adler32 crc).
- **/
-function inflateRaw(input, options) {
- options = options || {};
- options.raw = true;
- return inflate(input, options);
-}
-
-
-/**
- * ungzip(data[, options]) -> Uint8Array|String
- * - data (Uint8Array|ArrayBuffer): input data to decompress.
- * - options (Object): zlib inflate options.
- *
- * Just shortcut to [[inflate]], because it autodetects format
- * by header.content. Done for convenience.
- **/
-var ungzip = inflate;
-
-export {Inflate,inflate,inflateRaw,ungzip,constants}
-
diff --git a/imageknife/src/main/ets/components/3rd_party/pako/lib/utils/common.js b/imageknife/src/main/ets/components/3rd_party/pako/lib/utils/common.js
deleted file mode 100644
index dbccc6f..0000000
--- a/imageknife/src/main/ets/components/3rd_party/pako/lib/utils/common.js
+++ /dev/null
@@ -1,48 +0,0 @@
-'use strict';
-
-
-const _has = (obj, key) => {
- return Object.prototype.hasOwnProperty.call(obj, key);
-};
-
-export var assign = function (obj /*from1, from2, from3, ...*/) {
- const sources = Array.prototype.slice.call(arguments, 1);
- while (sources.length) {
- const source = sources.shift();
- if (!source) { continue; }
-
- if (typeof source !== 'object') {
- throw new TypeError(source + 'must be non-object');
- }
-
- for (const p in source) {
- if (_has(source, p)) {
- obj[p] = source[p];
- }
- }
- }
-
- return obj;
-};
-
-
-// Join array of chunks to single array.
-export var flattenChunks = (chunks) => {
- // calculate data length
- let len = 0;
-
- for (let i = 0, l = chunks.length; i < l; i++) {
- len += chunks[i].length;
- }
-
- // join chunks
- const result = new Uint8Array(len);
-
- for (let i = 0, pos = 0, l = chunks.length; i < l; i++) {
- let chunk = chunks[i];
- result.set(chunk, pos);
- pos += chunk.length;
- }
-
- return result;
-};
diff --git a/imageknife/src/main/ets/components/3rd_party/pako/lib/utils/strings.js b/imageknife/src/main/ets/components/3rd_party/pako/lib/utils/strings.js
deleted file mode 100644
index 6adf40f..0000000
--- a/imageknife/src/main/ets/components/3rd_party/pako/lib/utils/strings.js
+++ /dev/null
@@ -1,174 +0,0 @@
-// String encode/decode helpers
-'use strict';
-
-
-// Quick check if we can use fast array to bin string conversion
-//
-// - apply(Array) can fail on Android 2.2
-// - apply(Uint8Array) can fail on iOS 5.1 Safari
-//
-let STR_APPLY_UIA_OK = true;
-
-try { String.fromCharCode.apply(null, new Uint8Array(1)); } catch (__) { STR_APPLY_UIA_OK = false; }
-
-
-// Table with utf8 lengths (calculated by first byte of sequence)
-// Note, that 5 & 6-byte values and some 4-byte values can not be represented in JS,
-// because max possible codepoint is 0x10ffff
-const _utf8len = new Uint8Array(256);
-for (let q = 0; q < 256; q++) {
- _utf8len[q] = (q >= 252 ? 6 : q >= 248 ? 5 : q >= 240 ? 4 : q >= 224 ? 3 : q >= 192 ? 2 : 1);
-}
-_utf8len[254] = _utf8len[254] = 1; // Invalid sequence start
-
-
-// convert string to array (typed, when possible)
-export var string2buf = (str) => {
-// if (typeof TextEncoder === 'function' && TextEncoder.prototype.encode) {
-// return new TextEncoder().encode(str);
-// }
-
- let buf, c, c2, m_pos, i, str_len = str.length, buf_len = 0;
-
- // count binary size
- for (m_pos = 0; m_pos < str_len; m_pos++) {
- c = str.charCodeAt(m_pos);
- if ((c & 0xfc00) === 0xd800 && (m_pos + 1 < str_len)) {
- c2 = str.charCodeAt(m_pos + 1);
- if ((c2 & 0xfc00) === 0xdc00) {
- c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00);
- m_pos++;
- }
- }
- buf_len += c < 0x80 ? 1 : c < 0x800 ? 2 : c < 0x10000 ? 3 : 4;
- }
-
- // allocate buffer
- buf = new Uint8Array(buf_len);
-
- // convert
- for (i = 0, m_pos = 0; i < buf_len; m_pos++) {
- c = str.charCodeAt(m_pos);
- if ((c & 0xfc00) === 0xd800 && (m_pos + 1 < str_len)) {
- c2 = str.charCodeAt(m_pos + 1);
- if ((c2 & 0xfc00) === 0xdc00) {
- c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00);
- m_pos++;
- }
- }
- if (c < 0x80) {
- /* one byte */
- buf[i++] = c;
- } else if (c < 0x800) {
- /* two bytes */
- buf[i++] = 0xC0 | (c >>> 6);
- buf[i++] = 0x80 | (c & 0x3f);
- } else if (c < 0x10000) {
- /* three bytes */
- buf[i++] = 0xE0 | (c >>> 12);
- buf[i++] = 0x80 | (c >>> 6 & 0x3f);
- buf[i++] = 0x80 | (c & 0x3f);
- } else {
- /* four bytes */
- buf[i++] = 0xf0 | (c >>> 18);
- buf[i++] = 0x80 | (c >>> 12 & 0x3f);
- buf[i++] = 0x80 | (c >>> 6 & 0x3f);
- buf[i++] = 0x80 | (c & 0x3f);
- }
- }
-
- return buf;
-};
-
-// Helper
-const buf2binstring = (buf, len) => {
- // On Chrome, the arguments in a function call that are allowed is `65534`.
- // If the length of the buffer is smaller than that, we can use this optimization,
- // otherwise we will take a slower path.
- if (len < 65534) {
- if (buf.subarray && STR_APPLY_UIA_OK) {
- return String.fromCharCode.apply(null, buf.length === len ? buf : buf.subarray(0, len));
- }
- }
-
- let result = '';
- for (let i = 0; i < len; i++) {
- result += String.fromCharCode(buf[i]);
- }
- return result;
-};
-
-
-// convert array to string
-export var buf2string = (buf, max) => {
- const len = max || buf.length;
-
-// if (typeof TextDecoder === 'function' && TextDecoder.prototype.decode) {
-// return new TextDecoder().decode(buf.subarray(0, max));
-// }
-
- let i, out;
-
- // Reserve max possible length (2 words per char)
- // NB: by unknown reasons, Array is significantly faster for
- // String.fromCharCode.apply than Uint16Array.
- const utf16buf = new Array(len * 2);
-
- for (out = 0, i = 0; i < len;) {
- let c = buf[i++];
- // quick process ascii
- if (c < 0x80) { utf16buf[out++] = c; continue; }
-
- let c_len = _utf8len[c];
- // skip 5 & 6 byte codes
- if (c_len > 4) { utf16buf[out++] = 0xfffd; i += c_len - 1; continue; }
-
- // apply mask on first byte
- c &= c_len === 2 ? 0x1f : c_len === 3 ? 0x0f : 0x07;
- // join the rest
- while (c_len > 1 && i < len) {
- c = (c << 6) | (buf[i++] & 0x3f);
- c_len--;
- }
-
- // terminated by end of string?
- if (c_len > 1) { utf16buf[out++] = 0xfffd; continue; }
-
- if (c < 0x10000) {
- utf16buf[out++] = c;
- } else {
- c -= 0x10000;
- utf16buf[out++] = 0xd800 | ((c >> 10) & 0x3ff);
- utf16buf[out++] = 0xdc00 | (c & 0x3ff);
- }
- }
-
- return buf2binstring(utf16buf, out);
-};
-
-
-// Calculate the maximum position in a utf8 buffer that does not split a
-// multi-byte sequence. If that is not possible (the limit is very small),
-// return max as is.
-//
-// buf[] - utf8 byte array
-// max   - length limit (mandatory)
-export var utf8border = (buf, max) => {
-
- max = max || buf.length;
- if (max > buf.length) { max = buf.length; }
-
- // go back from last position, until start of sequence found
- let pos = max - 1;
- while (pos >= 0 && (buf[pos] & 0xC0) === 0x80) { pos--; }
-
- // Very small and broken sequence,
- // return max, because we should return something anyway.
- if (pos < 0) { return max; }
-
- // If we came to start of buffer - that means buffer is too small,
- // return max too.
- if (pos === 0) { return max; }
-
- return (pos + _utf8len[buf[pos]] > max) ? pos : max;
-};
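
The strings.js helpers removed above hand-roll UTF-8 conversion: string2buf first walks the UTF-16 string, folds surrogate pairs into single code points, and sums the UTF-8 byte length before allocating the output buffer. A minimal TypeScript sketch of that counting pass (the helper name utf8ByteLength is hypothetical, not part of pako):

```typescript
// Hypothetical helper mirroring the byte-count pass of the removed string2buf:
// pair high/low surrogates into one code point, then add 1-4 bytes per code point.
function utf8ByteLength(str: string): number {
  let bytes = 0;
  for (let i = 0; i < str.length; i++) {
    let c = str.charCodeAt(i);
    if ((c & 0xfc00) === 0xd800 && i + 1 < str.length) {
      const c2 = str.charCodeAt(i + 1);
      if ((c2 & 0xfc00) === 0xdc00) {
        // valid surrogate pair -> single code point >= 0x10000
        c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00);
        i++;
      }
    }
    bytes += c < 0x80 ? 1 : c < 0x800 ? 2 : c < 0x10000 ? 3 : 4;
  }
  return bytes;
}

console.log(utf8ByteLength('a€')); // 4: one byte for 'a', three for '€' (U+20AC)
```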
diff --git a/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/adler32.js b/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/adler32.js
deleted file mode 100644
index 7e838c5..0000000
--- a/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/adler32.js
+++ /dev/null
@@ -1,51 +0,0 @@
-'use strict';
-
-// Note: adler32 takes 12% for level 0 and 2% for level 6.
-// It isn't worth it to make additional optimizations as in original.
-// Small size is preferable.
-
-// (C) 1995-2013 Jean-loup Gailly and Mark Adler
-// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin
-//
-// This software is provided 'as-is', without any express or implied
-// warranty. In no event will the authors be held liable for any damages
-// arising from the use of this software.
-//
-// Permission is granted to anyone to use this software for any purpose,
-// including commercial applications, and to alter it and redistribute it
-// freely, subject to the following restrictions:
-//
-// 1. The origin of this software must not be misrepresented; you must not
-// claim that you wrote the original software. If you use this software
-// in a product, an acknowledgment in the product documentation would be
-// appreciated but is not required.
-// 2. Altered source versions must be plainly marked as such, and must not be
-// misrepresented as being the original software.
-// 3. This notice may not be removed or altered from any source distribution.
-
-const adler32 = (adler, buf, len, pos) => {
- let s1 = (adler & 0xffff) |0,
- s2 = ((adler >>> 16) & 0xffff) |0,
- n = 0;
-
- while (len !== 0) {
- // Set the chunk limit well below 5552 to keep s2 within 31 bits,
- // because we force signed 32-bit ints; otherwise the %= reduction
- // would be applied to an overflowed value.
- n = len > 2000 ? 2000 : len;
- len -= n;
-
- do {
- s1 = (s1 + buf[pos++]) |0;
- s2 = (s2 + s1) |0;
- } while (--n);
-
- s1 %= 65521;
- s2 %= 65521;
- }
-
- return (s1 | (s2 << 16)) |0;
-};
-
-
-export default adler32;
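
adler32.js, deleted above, keeps two running sums s1 and s2 modulo 65521 and batches the inner loop in chunks of at most 2000 bytes so the signed 32-bit intermediates cannot overflow before the modulo is applied. A self-contained sketch of the same idea (simplified to hash a whole buffer, seeded with 1 as zlib does):

```typescript
// Adler-32 sketch following the deleted implementation: chunked updates keep
// s2 within 31 bits, then both sums are reduced modulo 65521.
function adler32(adler: number, buf: Uint8Array): number {
  let s1 = adler & 0xffff;
  let s2 = (adler >>> 16) & 0xffff;
  let pos = 0;
  let len = buf.length;
  while (len !== 0) {
    let n = len > 2000 ? 2000 : len; // chunk size chosen so s2 stays in 31 bits
    len -= n;
    do {
      s1 = (s1 + buf[pos++]) | 0;
      s2 = (s2 + s1) | 0;
    } while (--n);
    s1 %= 65521;
    s2 %= 65521;
  }
  return (s1 | (s2 << 16)) | 0;
}

// Known reference value: Adler-32 of the ASCII string "Wikipedia" is 0x11E60398.
const sample = Uint8Array.from('Wikipedia', (ch) => ch.charCodeAt(0));
console.log((adler32(1, sample) >>> 0).toString(16)); // "11e60398"
```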
diff --git a/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/constants.js b/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/constants.js
deleted file mode 100644
index cfb8d4f..0000000
--- a/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/constants.js
+++ /dev/null
@@ -1,71 +0,0 @@
-'use strict';
-
-// (C) 1995-2013 Jean-loup Gailly and Mark Adler
-// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin
-//
-// This software is provided 'as-is', without any express or implied
-// warranty. In no event will the authors be held liable for any damages
-// arising from the use of this software.
-//
-// Permission is granted to anyone to use this software for any purpose,
-// including commercial applications, and to alter it and redistribute it
-// freely, subject to the following restrictions:
-//
-// 1. The origin of this software must not be misrepresented; you must not
-// claim that you wrote the original software. If you use this software
-// in a product, an acknowledgment in the product documentation would be
-// appreciated but is not required.
-// 2. Altered source versions must be plainly marked as such, and must not be
-// misrepresented as being the original software.
-// 3. This notice may not be removed or altered from any source distribution.
-
-export default {
-
- /* Allowed flush values; see deflate() and inflate() below for details */
- Z_NO_FLUSH: 0,
- Z_PARTIAL_FLUSH: 1,
- Z_SYNC_FLUSH: 2,
- Z_FULL_FLUSH: 3,
- Z_FINISH: 4,
- Z_BLOCK: 5,
- Z_TREES: 6,
-
- /* Return codes for the compression/decompression functions. Negative values
- * are errors, positive values are used for special but normal events.
- */
- Z_OK: 0,
- Z_STREAM_END: 1,
- Z_NEED_DICT: 2,
- Z_ERRNO: -1,
- Z_STREAM_ERROR: -2,
- Z_DATA_ERROR: -3,
- Z_MEM_ERROR: -4,
- Z_BUF_ERROR: -5,
- //Z_VERSION_ERROR: -6,
-
- /* compression levels */
- Z_NO_COMPRESSION: 0,
- Z_BEST_SPEED: 1,
- Z_BEST_COMPRESSION: 9,
- Z_DEFAULT_COMPRESSION: -1,
-
-
- Z_FILTERED: 1,
- Z_HUFFMAN_ONLY: 2,
- Z_RLE: 3,
- Z_FIXED: 4,
- Z_DEFAULT_STRATEGY: 0,
-
- /* Possible values of the data_type field (though see inflate()) */
- Z_BINARY: 0,
- Z_TEXT: 1,
- //Z_ASCII: 1, // = Z_TEXT (deprecated)
- Z_UNKNOWN: 2,
-
- /* The deflate compression method */
- Z_DEFLATED: 8
- //Z_NULL: null // Use -1 or null inline, depending on var type
-};
-
-
-
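
constants.js only re-exported the zlib flush modes, return codes, and strategy values. After this change the library consumes pako from ohpm instead of the vendored copy; a hedged usage sketch of the replacement dependency follows (it assumes the ohpm package exposes upstream pako's deflate/inflate named exports and that the declared dependency name is 'pako'; adjust to whatever oh-package.json5 actually declares):

```typescript
import { deflate, inflate } from 'pako'; // assumed ohpm dependency name

// Round-trip: deflate a small buffer and inflate it back.
const input: Uint8Array = Uint8Array.from('hello imageknife', (ch) => ch.charCodeAt(0));
const compressed: Uint8Array = deflate(input);
const restored: Uint8Array = inflate(compressed);
console.log(restored.length === input.length); // true if the round-trip succeeded
```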
diff --git a/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/crc32.js b/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/crc32.js
deleted file mode 100644
index 70bce7e..0000000
--- a/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/crc32.js
+++ /dev/null
@@ -1,58 +0,0 @@
-'use strict';
-
-// Note: we can't get significant speed boost here.
-// So write code to minimize size - no pregenerated tables
-// and array tools dependencies.
-
-// (C) 1995-2013 Jean-loup Gailly and Mark Adler
-// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin
-//
-// This software is provided 'as-is', without any express or implied
-// warranty. In no event will the authors be held liable for any damages
-// arising from the use of this software.
-//
-// Permission is granted to anyone to use this software for any purpose,
-// including commercial applications, and to alter it and redistribute it
-// freely, subject to the following restrictions:
-//
-// 1. The origin of this software must not be misrepresented; you must not
-// claim that you wrote the original software. If you use this software
-// in a product, an acknowledgment in the product documentation would be
-// appreciated but is not required.
-// 2. Altered source versions must be plainly marked as such, and must not be
-// misrepresented as being the original software.
-// 3. This notice may not be removed or altered from any source distribution.
-
-// Use an ordinary array; a typed array gives no speed boost here
-const makeTable = () => {
- let c, table = [];
-
- for (var n = 0; n < 256; n++) {
- c = n;
- for (var k = 0; k < 8; k++) {
- c = ((c & 1) ? (0xEDB88320 ^ (c >>> 1)) : (c >>> 1));
- }
- table[n] = c;
- }
-
- return table;
-};
-
-// Create table on load. Just 256 32-bit values. Not a problem.
-const crcTable = new Uint32Array(makeTable());
-
-
-const crc32 = (crc, buf, len, pos) => {
- const t = crcTable;
- const end = pos + len;
-
- crc ^= -1;
-
- for (let i = pos; i < end; i++) {
- crc = (crc >>> 8) ^ t[(crc ^ buf[i]) & 0xFF];
- }
-
- return (crc ^ (-1)); // >>> 0;
-};
-
-export default crc32;
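
crc32.js builds its 256-entry lookup table at load time from the reflected polynomial 0xEDB88320 and then folds one byte per step through the table. The same table-driven approach as a small sketch (simplified to hash a whole buffer and return an unsigned value):

```typescript
// CRC-32 sketch matching the deleted table-driven code: 256-entry table from
// the reflected polynomial 0xEDB88320, one table lookup per input byte.
const crcTable: Uint32Array = (() => {
  const table = new Uint32Array(256);
  for (let n = 0; n < 256; n++) {
    let c = n;
    for (let k = 0; k < 8; k++) {
      c = (c & 1) ? (0xEDB88320 ^ (c >>> 1)) : (c >>> 1);
    }
    table[n] = c;
  }
  return table;
})();

function crc32(buf: Uint8Array): number {
  let crc = -1; // pre-condition with all ones
  for (let i = 0; i < buf.length; i++) {
    crc = (crc >>> 8) ^ crcTable[(crc ^ buf[i]) & 0xff];
  }
  return (crc ^ -1) >>> 0; // final xor, returned as an unsigned 32-bit value
}

// Standard check value: CRC-32 of the ASCII string "123456789" is 0xCBF43926.
const check = Uint8Array.from('123456789', (ch) => ch.charCodeAt(0));
console.log(crc32(check).toString(16)); // "cbf43926"
```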
diff --git a/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/deflate.js b/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/deflate.js
deleted file mode 100644
index 19688e6..0000000
--- a/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/deflate.js
+++ /dev/null
@@ -1,2054 +0,0 @@
-'use strict';
-
-// (C) 1995-2013 Jean-loup Gailly and Mark Adler
-// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin
-//
-// This software is provided 'as-is', without any express or implied
-// warranty. In no event will the authors be held liable for any damages
-// arising from the use of this software.
-//
-// Permission is granted to anyone to use this software for any purpose,
-// including commercial applications, and to alter it and redistribute it
-// freely, subject to the following restrictions:
-//
-// 1. The origin of this software must not be misrepresented; you must not
-// claim that you wrote the original software. If you use this software
-// in a product, an acknowledgment in the product documentation would be
-// appreciated but is not required.
-// 2. Altered source versions must be plainly marked as such, and must not be
-// misrepresented as being the original software.
-// 3. This notice may not be removed or altered from any source distribution.
-
-import { _tr_init, _tr_stored_block, _tr_flush_block, _tr_tally, _tr_align } from './trees';
-import adler32 from './adler32';
-import crc32 from './crc32';
-import msg from './messages';
-
-/* Public constants ==========================================================*/
-/* ===========================================================================*/
-
-import constants from './constants';
-var Z_FULL_FLUSH = constants.Z_FULL_FLUSH;
-var Z_NO_FLUSH = constants.Z_NO_FLUSH;
-var Z_PARTIAL_FLUSH = constants.Z_PARTIAL_FLUSH;
-var Z_FINISH = constants.Z_FINISH;
-var Z_BLOCK = constants.Z_BLOCK;
-var Z_OK = constants.Z_OK;
-var Z_STREAM_END = constants.Z_STREAM_END;
-var Z_STREAM_ERROR = constants.Z_STREAM_ERROR;
-var Z_DATA_ERROR = constants.Z_DATA_ERROR;
-var Z_BUF_ERROR = constants.Z_BUF_ERROR;
-var Z_DEFAULT_COMPRESSION = constants.Z_DEFAULT_COMPRESSION;
-var Z_FILTERED = constants.Z_FILTERED;
-var Z_HUFFMAN_ONLY = constants.Z_HUFFMAN_ONLY;
-var Z_RLE = constants.Z_RLE;
-var Z_FIXED = constants.Z_FIXED;
-var Z_DEFAULT_STRATEGY = constants.Z_DEFAULT_STRATEGY;
-var Z_UNKNOWN = constants.Z_UNKNOWN;
-var Z_DEFLATED = constants.Z_DEFLATED;
-/*============================================================================*/
-
-
-const MAX_MEM_LEVEL = 9;
-/* Maximum value for memLevel in deflateInit2 */
-const MAX_WBITS = 15;
-/* 32K LZ77 window */
-const DEF_MEM_LEVEL = 8;
-
-
-const LENGTH_CODES = 29;
-/* number of length codes, not counting the special END_BLOCK code */
-const LITERALS = 256;
-/* number of literal bytes 0..255 */
-const L_CODES = LITERALS + 1 + LENGTH_CODES;
-/* number of Literal or Length codes, including the END_BLOCK code */
-const D_CODES = 30;
-/* number of distance codes */
-const BL_CODES = 19;
-/* number of codes used to transfer the bit lengths */
-const HEAP_SIZE = 2 * L_CODES + 1;
-/* maximum heap size */
-const MAX_BITS = 15;
-/* All codes must not exceed MAX_BITS bits */
-
-const MIN_MATCH = 3;
-const MAX_MATCH = 258;
-const MIN_LOOKAHEAD = (MAX_MATCH + MIN_MATCH + 1);
-
-const PRESET_DICT = 0x20;
-
-const INIT_STATE = 42; /* zlib header -> BUSY_STATE */
-//#ifdef GZIP
-const GZIP_STATE = 57; /* gzip header -> BUSY_STATE | EXTRA_STATE */
-//#endif
-const EXTRA_STATE = 69; /* gzip extra block -> NAME_STATE */
-const NAME_STATE = 73; /* gzip file name -> COMMENT_STATE */
-const COMMENT_STATE = 91; /* gzip comment -> HCRC_STATE */
-const HCRC_STATE = 103; /* gzip header CRC -> BUSY_STATE */
-const BUSY_STATE = 113; /* deflate -> FINISH_STATE */
-const FINISH_STATE = 666; /* stream complete */
-
-const BS_NEED_MORE = 1; /* block not completed, need more input or more output */
-const BS_BLOCK_DONE = 2; /* block flush performed */
-const BS_FINISH_STARTED = 3; /* finish started, need only more output at next deflate */
-const BS_FINISH_DONE = 4; /* finish done, accept no more input or output */
-
-const OS_CODE = 0x03; // Unix :) . Don't detect, use this default.
-
-const err = (strm, errorCode) => {
- strm.msg = msg[errorCode];
- return errorCode;
-};
-
-const rank = (f) => {
- return ((f) * 2) - ((f) > 4 ? 9 : 0);
-};
-
-const zero = (buf) => {
- let len = buf.length; while (--len >= 0) { buf[len] = 0; }
-};
-
-/* ===========================================================================
- * Slide the hash table when sliding the window down (could be avoided with 32
- * bit values at the expense of memory usage). We slide even when level == 0 to
- * keep the hash table consistent if we switch back to level > 0 later.
- */
-const slide_hash = (s) => {
- let n, m;
- let p;
- let wsize = s.w_size;
-
- n = s.hash_size;
- p = n;
- do {
- m = s.head[--p];
- s.head[p] = (m >= wsize ? m - wsize : 0);
- } while (--n);
- n = wsize;
-//#ifndef FASTEST
- p = n;
- do {
- m = s.prev[--p];
- s.prev[p] = (m >= wsize ? m - wsize : 0);
- /* If n is not on any hash chain, prev[n] is garbage but
- * its value will never be used.
- */
- } while (--n);
-//#endif
-};
-
-/* eslint-disable new-cap */
-let HASH_ZLIB = (s, prev, data) => ((prev << s.hash_shift) ^ data) & s.hash_mask;
-// This hash causes fewer collisions, https://github.com/nodeca/pako/issues/135
-// But breaks binary compatibility
-//let HASH_FAST = (s, prev, data) => ((prev << 8) + (prev >> 8) + (data << 4)) & s.hash_mask;
-let HASH = HASH_ZLIB;
-
-
-/* =========================================================================
- * Flush as much pending output as possible. All deflate() output, except for
- * some deflate_stored() output, goes through this function so some
- * applications may wish to modify it to avoid allocating a large
- * strm->next_out buffer and copying into it. (See also read_buf()).
- */
-const flush_pending = (strm) => {
- const s = strm.state;
-
- //_tr_flush_bits(s);
- let len = s.pending;
- if (len > strm.avail_out) {
- len = strm.avail_out;
- }
- if (len === 0) { return; }
-
- strm.output.set(s.pending_buf.subarray(s.pending_out, s.pending_out + len), strm.next_out);
- strm.next_out += len;
- s.pending_out += len;
- strm.total_out += len;
- strm.avail_out -= len;
- s.pending -= len;
- if (s.pending === 0) {
- s.pending_out = 0;
- }
-};
-
-
-const flush_block_only = (s, last) => {
- _tr_flush_block(s, (s.block_start >= 0 ? s.block_start : -1), s.strstart - s.block_start, last);
- s.block_start = s.strstart;
- flush_pending(s.strm);
-};
-
-
-const put_byte = (s, b) => {
- s.pending_buf[s.pending++] = b;
-};
-
-
-/* =========================================================================
- * Put a short in the pending buffer. The 16-bit value is put in MSB order.
- * IN assertion: the stream state is correct and there is enough room in
- * pending_buf.
- */
-const putShortMSB = (s, b) => {
-
- // put_byte(s, (Byte)(b >> 8));
-// put_byte(s, (Byte)(b & 0xff));
- s.pending_buf[s.pending++] = (b >>> 8) & 0xff;
- s.pending_buf[s.pending++] = b & 0xff;
-};
-
-
-/* ===========================================================================
- * Read a new buffer from the current input stream, update the adler32
- * and total number of bytes read. All deflate() input goes through
- * this function so some applications may wish to modify it to avoid
- * allocating a large strm->input buffer and copying from it.
- * (See also flush_pending()).
- */
-const read_buf = (strm, buf, start, size) => {
-
- let len = strm.avail_in;
-
- if (len > size) { len = size; }
- if (len === 0) { return 0; }
-
- strm.avail_in -= len;
-
- // zmemcpy(buf, strm->next_in, len);
- buf.set(strm.input.subarray(strm.next_in, strm.next_in + len), start);
- if (strm.state.wrap === 1) {
- strm.adler = adler32(strm.adler, buf, len, start);
- }
-
- else if (strm.state.wrap === 2) {
- strm.adler = crc32(strm.adler, buf, len, start);
- }
-
- strm.next_in += len;
- strm.total_in += len;
-
- return len;
-};
-
-
-/* ===========================================================================
- * Set match_start to the longest match starting at the given string and
- * return its length. Matches shorter or equal to prev_length are discarded,
- * in which case the result is equal to prev_length and match_start is
- * garbage.
- * IN assertions: cur_match is the head of the hash chain for the current
- * string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
- * OUT assertion: the match length is not greater than s->lookahead.
- */
-const longest_match = (s, cur_match) => {
-
- let chain_length = s.max_chain_length; /* max hash chain length */
- let scan = s.strstart; /* current string */
- let match; /* matched string */
- let len; /* length of current match */
- let best_len = s.prev_length; /* best match length so far */
- let nice_match = s.nice_match; /* stop if match long enough */
- const limit = (s.strstart > (s.w_size - MIN_LOOKAHEAD)) ?
- s.strstart - (s.w_size - MIN_LOOKAHEAD) : 0/*NIL*/;
-
- const _win = s.window; // shortcut
-
- const wmask = s.w_mask;
- const prev = s.prev;
-
- /* Stop when cur_match becomes <= limit. To simplify the code,
- * we prevent matches with the string of window index 0.
- */
-
- const strend = s.strstart + MAX_MATCH;
- let scan_end1 = _win[scan + best_len - 1];
- let scan_end = _win[scan + best_len];
-
- /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
- * It is easy to get rid of this optimization if necessary.
- */
- // Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");
-
- /* Do not waste too much time if we already have a good match: */
- if (s.prev_length >= s.good_match) {
- chain_length >>= 2;
- }
- /* Do not look for matches beyond the end of the input. This is necessary
- * to make deflate deterministic.
- */
- if (nice_match > s.lookahead) { nice_match = s.lookahead; }
-
- // Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead");
-
- do {
- // Assert(cur_match < s->strstart, "no future");
- match = cur_match;
-
- /* Skip to next match if the match length cannot increase
- * or if the match length is less than 2. Note that the checks below
- * for insufficient lookahead only occur occasionally for performance
- * reasons. Therefore uninitialized memory will be accessed, and
- * conditional jumps will be made that depend on those values.
- * However the length of the match is limited to the lookahead, so
- * the output of deflate is not affected by the uninitialized values.
- */
-
- if (_win[match + best_len] !== scan_end ||
- _win[match + best_len - 1] !== scan_end1 ||
- _win[match] !== _win[scan] ||
- _win[++match] !== _win[scan + 1]) {
- continue;
- }
-
- /* The check at best_len-1 can be removed because it will be made
- * again later. (This heuristic is not always a win.)
- * It is not necessary to compare scan[2] and match[2] since they
- * are always equal when the other bytes match, given that
- * the hash keys are equal and that HASH_BITS >= 8.
- */
- scan += 2;
- match++;
- // Assert(*scan == *match, "match[2]?");
-
- /* We check for insufficient lookahead only every 8th comparison;
- * the 256th check will be made at strstart+258.
- */
- do {
- /*jshint noempty:false*/
- } while (_win[++scan] === _win[++match] && _win[++scan] === _win[++match] &&
- _win[++scan] === _win[++match] && _win[++scan] === _win[++match] &&
- _win[++scan] === _win[++match] && _win[++scan] === _win[++match] &&
- _win[++scan] === _win[++match] && _win[++scan] === _win[++match] &&
- scan < strend);
-
- // Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");
-
- len = MAX_MATCH - (strend - scan);
- scan = strend - MAX_MATCH;
-
- if (len > best_len) {
- s.match_start = cur_match;
- best_len = len;
- if (len >= nice_match) {
- break;
- }
- scan_end1 = _win[scan + best_len - 1];
- scan_end = _win[scan + best_len];
- }
- } while ((cur_match = prev[cur_match & wmask]) > limit && --chain_length !== 0);
-
- if (best_len <= s.lookahead) {
- return best_len;
- }
- return s.lookahead;
-};
-
-
-/* ===========================================================================
- * Fill the window when the lookahead becomes insufficient.
- * Updates strstart and lookahead.
- *
- * IN assertion: lookahead < MIN_LOOKAHEAD
- * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD
- * At least one byte has been read, or avail_in == 0; reads are
- * performed for at least two bytes (required for the zip translate_eol
- * option -- not supported here).
- */
-const fill_window = (s) => {
-
- const _w_size = s.w_size;
- let n, more, str;
-
- //Assert(s->lookahead < MIN_LOOKAHEAD, "already enough lookahead");
-
- do {
- more = s.window_size - s.lookahead - s.strstart;
-
- // JS ints have 32 bit, block below not needed
- /* Deal with !@#$% 64K limit: */
- //if (sizeof(int) <= 2) {
- // if (more == 0 && s->strstart == 0 && s->lookahead == 0) {
- // more = wsize;
- //
- // } else if (more == (unsigned)(-1)) {
- // /* Very unlikely, but possible on 16 bit machine if
- // * strstart == 0 && lookahead == 1 (input done a byte at time)
- // */
- // more--;
- // }
- //}
-
-
- /* If the window is almost full and there is insufficient lookahead,
- * move the upper half to the lower one to make room in the upper half.
- */
- if (s.strstart >= _w_size + (_w_size - MIN_LOOKAHEAD)) {
-
- s.window.set(s.window.subarray(_w_size, _w_size + _w_size - more), 0);
- s.match_start -= _w_size;
- s.strstart -= _w_size;
- /* we now have strstart >= MAX_DIST */
- s.block_start -= _w_size;
- if (s.insert > s.strstart) {
- s.insert = s.strstart;
- }
- slide_hash(s);
- more += _w_size;
- }
- if (s.strm.avail_in === 0) {
- break;
- }
-
- /* If there was no sliding:
- * strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 &&
- * more == window_size - lookahead - strstart
- * => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1)
- * => more >= window_size - 2*WSIZE + 2
- * In the BIG_MEM or MMAP case (not yet supported),
- * window_size == input_size + MIN_LOOKAHEAD &&
- * strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD.
- * Otherwise, window_size == 2*WSIZE so more >= 2.
- * If there was sliding, more >= WSIZE. So in all cases, more >= 2.
- */
- //Assert(more >= 2, "more < 2");
- n = read_buf(s.strm, s.window, s.strstart + s.lookahead, more);
- s.lookahead += n;
-
- /* Initialize the hash value now that we have some input: */
- if (s.lookahead + s.insert >= MIN_MATCH) {
- str = s.strstart - s.insert;
- s.ins_h = s.window[str];
-
- /* UPDATE_HASH(s, s->ins_h, s->window[str + 1]); */
- s.ins_h = HASH(s, s.ins_h, s.window[str + 1]);
-//#if MIN_MATCH != 3
-// Call update_hash() MIN_MATCH-3 more times
-//#endif
- while (s.insert) {
- /* UPDATE_HASH(s, s->ins_h, s->window[str + MIN_MATCH-1]); */
- s.ins_h = HASH(s, s.ins_h, s.window[str + MIN_MATCH - 1]);
-
- s.prev[str & s.w_mask] = s.head[s.ins_h];
- s.head[s.ins_h] = str;
- str++;
- s.insert--;
- if (s.lookahead + s.insert < MIN_MATCH) {
- break;
- }
- }
- }
- /* If the whole input has less than MIN_MATCH bytes, ins_h is garbage,
- * but this is not important since only literal bytes will be emitted.
- */
-
- } while (s.lookahead < MIN_LOOKAHEAD && s.strm.avail_in !== 0);
-
- /* If the WIN_INIT bytes after the end of the current data have never been
- * written, then zero those bytes in order to avoid memory check reports of
- * the use of uninitialized (or uninitialised as Julian writes) bytes by
- * the longest match routines. Update the high water mark for the next
- * time through here. WIN_INIT is set to MAX_MATCH since the longest match
- * routines allow scanning to strstart + MAX_MATCH, ignoring lookahead.
- */
-// if (s.high_water < s.window_size) {
-// const curr = s.strstart + s.lookahead;
-// let init = 0;
-//
-// if (s.high_water < curr) {
-// /* Previous high water mark below current data -- zero WIN_INIT
-// * bytes or up to end of window, whichever is less.
-// */
-// init = s.window_size - curr;
-// if (init > WIN_INIT)
-// init = WIN_INIT;
-// zmemzero(s->window + curr, (unsigned)init);
-// s->high_water = curr + init;
-// }
-// else if (s->high_water < (ulg)curr + WIN_INIT) {
-// /* High water mark at or above current data, but below current data
-// * plus WIN_INIT -- zero out to current data plus WIN_INIT, or up
-// * to end of window, whichever is less.
-// */
-// init = (ulg)curr + WIN_INIT - s->high_water;
-// if (init > s->window_size - s->high_water)
-// init = s->window_size - s->high_water;
-// zmemzero(s->window + s->high_water, (unsigned)init);
-// s->high_water += init;
-// }
-// }
-//
-// Assert((ulg)s->strstart <= s->window_size - MIN_LOOKAHEAD,
-// "not enough room for search");
-};
-
-/* ===========================================================================
- * Copy without compression as much as possible from the input stream, return
- * the current block state.
- *
- * In case deflateParams() is used to later switch to a non-zero compression
- * level, s->matches (otherwise unused when storing) keeps track of the number
- * of hash table slides to perform. If s->matches is 1, then one hash table
- * slide will be done when switching. If s->matches is 2, the maximum value
- * allowed here, then the hash table will be cleared, since two or more slides
- * is the same as a clear.
- *
- * deflate_stored() is written to minimize the number of times an input byte is
- * copied. It is most efficient with large input and output buffers, which
- * maximizes the opportunities to have a single copy from next_in to next_out.
- */
-const deflate_stored = (s, flush) => {
-
- /* Smallest worthy block size when not flushing or finishing. By default
- * this is 32K. This can be as small as 507 bytes for memLevel == 1. For
- * large input and output buffers, the stored block size will be larger.
- */
- let min_block = s.pending_buf_size - 5 > s.w_size ? s.w_size : s.pending_buf_size - 5;
-
- /* Copy as many min_block or larger stored blocks directly to next_out as
- * possible. If flushing, copy the remaining available input to next_out as
- * stored blocks, if there is enough space.
- */
- let len, left, have, last = 0;
- let used = s.strm.avail_in;
- do {
- /* Set len to the maximum size block that we can copy directly with the
- * available input data and output space. Set left to how much of that
- * would be copied from what's left in the window.
- */
- len = 65535/* MAX_STORED */; /* maximum deflate stored block length */
- have = (s.bi_valid + 42) >> 3; /* number of header bytes */
- if (s.strm.avail_out < have) { /* need room for header */
- break;
- }
- /* maximum stored block length that will fit in avail_out: */
- have = s.strm.avail_out - have;
- left = s.strstart - s.block_start; /* bytes left in window */
- if (len > left + s.strm.avail_in) {
- len = left + s.strm.avail_in; /* limit len to the input */
- }
- if (len > have) {
- len = have; /* limit len to the output */
- }
-
- /* If the stored block would be less than min_block in length, or if
- * unable to copy all of the available input when flushing, then try
- * copying to the window and the pending buffer instead. Also don't
- * write an empty block when flushing -- deflate() does that.
- */
- if (len < min_block && ((len === 0 && flush !== Z_FINISH) ||
- flush === Z_NO_FLUSH ||
- len !== left + s.strm.avail_in)) {
- break;
- }
-
- /* Make a dummy stored block in pending to get the header bytes,
- * including any pending bits. This also updates the debugging counts.
- */
- last = flush === Z_FINISH && len === left + s.strm.avail_in ? 1 : 0;
- _tr_stored_block(s, 0, 0, last);
-
- /* Replace the lengths in the dummy stored block with len. */
- s.pending_buf[s.pending - 4] = len;
- s.pending_buf[s.pending - 3] = len >> 8;
- s.pending_buf[s.pending - 2] = ~len;
- s.pending_buf[s.pending - 1] = ~len >> 8;
-
- /* Write the stored block header bytes. */
- flush_pending(s.strm);
-
-//#ifdef ZLIB_DEBUG
-// /* Update debugging counts for the data about to be copied. */
-// s->compressed_len += len << 3;
-// s->bits_sent += len << 3;
-//#endif
-
- /* Copy uncompressed bytes from the window to next_out. */
- if (left) {
- if (left > len) {
- left = len;
- }
- //zmemcpy(s->strm->next_out, s->window + s->block_start, left);
- s.strm.output.set(s.window.subarray(s.block_start, s.block_start + left), s.strm.next_out);
- s.strm.next_out += left;
- s.strm.avail_out -= left;
- s.strm.total_out += left;
- s.block_start += left;
- len -= left;
- }
-
- /* Copy uncompressed bytes directly from next_in to next_out, updating
- * the check value.
- */
- if (len) {
- read_buf(s.strm, s.strm.output, s.strm.next_out, len);
- s.strm.next_out += len;
- s.strm.avail_out -= len;
- s.strm.total_out += len;
- }
- } while (last === 0);
-
- /* Update the sliding window with the last s->w_size bytes of the copied
- * data, or append all of the copied data to the existing window if less
- * than s->w_size bytes were copied. Also update the number of bytes to
- * insert in the hash tables, in the event that deflateParams() switches to
- * a non-zero compression level.
- */
- used -= s.strm.avail_in; /* number of input bytes directly copied */
- if (used) {
- /* If any input was used, then no unused input remains in the window,
- * therefore s->block_start == s->strstart.
- */
- if (used >= s.w_size) { /* supplant the previous history */
- s.matches = 2; /* clear hash */
- //zmemcpy(s->window, s->strm->next_in - s->w_size, s->w_size);
- s.window.set(s.strm.input.subarray(s.strm.next_in - s.w_size, s.strm.next_in), 0);
- s.strstart = s.w_size;
- s.insert = s.strstart;
- }
- else {
- if (s.window_size - s.strstart <= used) {
- /* Slide the window down. */
- s.strstart -= s.w_size;
- //zmemcpy(s->window, s->window + s->w_size, s->strstart);
- s.window.set(s.window.subarray(s.w_size, s.w_size + s.strstart), 0);
- if (s.matches < 2) {
- s.matches++; /* add a pending slide_hash() */
- }
- if (s.insert > s.strstart) {
- s.insert = s.strstart;
- }
- }
- //zmemcpy(s->window + s->strstart, s->strm->next_in - used, used);
- s.window.set(s.strm.input.subarray(s.strm.next_in - used, s.strm.next_in), s.strstart);
- s.strstart += used;
- s.insert += used > s.w_size - s.insert ? s.w_size - s.insert : used;
- }
- s.block_start = s.strstart;
- }
- if (s.high_water < s.strstart) {
- s.high_water = s.strstart;
- }
-
- /* If the last block was written to next_out, then done. */
- if (last) {
- return BS_FINISH_DONE;
- }
-
- /* If flushing and all input has been consumed, then done. */
- if (flush !== Z_NO_FLUSH && flush !== Z_FINISH &&
- s.strm.avail_in === 0 && s.strstart === s.block_start) {
- return BS_BLOCK_DONE;
- }
-
- /* Fill the window with any remaining input. */
- have = s.window_size - s.strstart;
- if (s.strm.avail_in > have && s.block_start >= s.w_size) {
- /* Slide the window down. */
- s.block_start -= s.w_size;
- s.strstart -= s.w_size;
- //zmemcpy(s->window, s->window + s->w_size, s->strstart);
- s.window.set(s.window.subarray(s.w_size, s.w_size + s.strstart), 0);
- if (s.matches < 2) {
- s.matches++; /* add a pending slide_hash() */
- }
- have += s.w_size; /* more space now */
- if (s.insert > s.strstart) {
- s.insert = s.strstart;
- }
- }
- if (have > s.strm.avail_in) {
- have = s.strm.avail_in;
- }
- if (have) {
- read_buf(s.strm, s.window, s.strstart, have);
- s.strstart += have;
- s.insert += have > s.w_size - s.insert ? s.w_size - s.insert : have;
- }
- if (s.high_water < s.strstart) {
- s.high_water = s.strstart;
- }
-
- /* There was not enough avail_out to write a complete worthy or flushed
- * stored block to next_out. Write a stored block to pending instead, if we
- * have enough input for a worthy block, or if flushing and there is enough
- * room for the remaining input as a stored block in the pending buffer.
- */
- have = (s.bi_valid + 42) >> 3; /* number of header bytes */
- /* maximum stored block length that will fit in pending: */
- have = s.pending_buf_size - have > 65535/* MAX_STORED */ ? 65535/* MAX_STORED */ : s.pending_buf_size - have;
- min_block = have > s.w_size ? s.w_size : have;
- left = s.strstart - s.block_start;
- if (left >= min_block ||
- ((left || flush === Z_FINISH) && flush !== Z_NO_FLUSH &&
- s.strm.avail_in === 0 && left <= have)) {
- len = left > have ? have : left;
- last = flush === Z_FINISH && s.strm.avail_in === 0 &&
- len === left ? 1 : 0;
- _tr_stored_block(s, s.block_start, len, last);
- s.block_start += len;
- flush_pending(s.strm);
- }
-
- /* We've done all we can with the available input and output. */
- return last ? BS_FINISH_STARTED : BS_NEED_MORE;
-};
-
-
-/* ===========================================================================
- * Compress as much as possible from the input stream, return the current
- * block state.
- * This function does not perform lazy evaluation of matches and inserts
- * new strings in the dictionary only for unmatched strings or for short
- * matches. It is used only for the fast compression options.
- */
-const deflate_fast = (s, flush) => {
-
- let hash_head; /* head of the hash chain */
- let bflush; /* set if current block must be flushed */
-
- for (;;) {
- /* Make sure that we always have enough lookahead, except
- * at the end of the input file. We need MAX_MATCH bytes
- * for the next match, plus MIN_MATCH bytes to insert the
- * string following the next match.
- */
- if (s.lookahead < MIN_LOOKAHEAD) {
- fill_window(s);
- if (s.lookahead < MIN_LOOKAHEAD && flush === Z_NO_FLUSH) {
- return BS_NEED_MORE;
- }
- if (s.lookahead === 0) {
- break; /* flush the current block */
- }
- }
-
- /* Insert the string window[strstart .. strstart+2] in the
- * dictionary, and set hash_head to the head of the hash chain:
- */
- hash_head = 0/*NIL*/;
- if (s.lookahead >= MIN_MATCH) {
- /*** INSERT_STRING(s, s.strstart, hash_head); ***/
- s.ins_h = HASH(s, s.ins_h, s.window[s.strstart + MIN_MATCH - 1]);
- hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h];
- s.head[s.ins_h] = s.strstart;
- /***/
- }
-
- /* Find the longest match, discarding those <= prev_length.
- * At this point we have always match_length < MIN_MATCH
- */
- if (hash_head !== 0/*NIL*/ && ((s.strstart - hash_head) <= (s.w_size - MIN_LOOKAHEAD))) {
- /* To simplify the code, we prevent matches with the string
- * of window index 0 (in particular we have to avoid a match
- * of the string with itself at the start of the input file).
- */
- s.match_length = longest_match(s, hash_head);
- /* longest_match() sets match_start */
- }
- if (s.match_length >= MIN_MATCH) {
- // check_match(s, s.strstart, s.match_start, s.match_length); // for debug only
-
- /*** _tr_tally_dist(s, s.strstart - s.match_start,
- s.match_length - MIN_MATCH, bflush); ***/
- bflush = _tr_tally(s, s.strstart - s.match_start, s.match_length - MIN_MATCH);
-
- s.lookahead -= s.match_length;
-
- /* Insert new strings in the hash table only if the match length
- * is not too large. This saves time but degrades compression.
- */
- if (s.match_length <= s.max_lazy_match/*max_insert_length*/ && s.lookahead >= MIN_MATCH) {
- s.match_length--; /* string at strstart already in table */
- do {
- s.strstart++;
- /*** INSERT_STRING(s, s.strstart, hash_head); ***/
- s.ins_h = HASH(s, s.ins_h, s.window[s.strstart + MIN_MATCH - 1]);
- hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h];
- s.head[s.ins_h] = s.strstart;
- /***/
- /* strstart never exceeds WSIZE-MAX_MATCH, so there are
- * always MIN_MATCH bytes ahead.
- */
- } while (--s.match_length !== 0);
- s.strstart++;
- } else
- {
- s.strstart += s.match_length;
- s.match_length = 0;
- s.ins_h = s.window[s.strstart];
- /* UPDATE_HASH(s, s.ins_h, s.window[s.strstart+1]); */
- s.ins_h = HASH(s, s.ins_h, s.window[s.strstart + 1]);
-
-//#if MIN_MATCH != 3
-// Call UPDATE_HASH() MIN_MATCH-3 more times
-//#endif
- /* If lookahead < MIN_MATCH, ins_h is garbage, but it does not
- * matter since it will be recomputed at next deflate call.
- */
- }
- } else {
- /* No match, output a literal byte */
- //Tracevv((stderr,"%c", s.window[s.strstart]));
- /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/
- bflush = _tr_tally(s, 0, s.window[s.strstart]);
-
- s.lookahead--;
- s.strstart++;
- }
- if (bflush) {
- /*** FLUSH_BLOCK(s, 0); ***/
- flush_block_only(s, false);
- if (s.strm.avail_out === 0) {
- return BS_NEED_MORE;
- }
- /***/
- }
- }
- s.insert = ((s.strstart < (MIN_MATCH - 1)) ? s.strstart : MIN_MATCH - 1);
- if (flush === Z_FINISH) {
- /*** FLUSH_BLOCK(s, 1); ***/
- flush_block_only(s, true);
- if (s.strm.avail_out === 0) {
- return BS_FINISH_STARTED;
- }
- /***/
- return BS_FINISH_DONE;
- }
- if (s.sym_next) {
- /*** FLUSH_BLOCK(s, 0); ***/
- flush_block_only(s, false);
- if (s.strm.avail_out === 0) {
- return BS_NEED_MORE;
- }
- /***/
- }
- return BS_BLOCK_DONE;
-};
-
-/* ===========================================================================
- * Same as above, but achieves better compression. We use a lazy
- * evaluation for matches: a match is finally adopted only if there is
- * no better match at the next window position.
- */
-const deflate_slow = (s, flush) => {
-
- let hash_head; /* head of hash chain */
- let bflush; /* set if current block must be flushed */
-
- let max_insert;
-
- /* Process the input block. */
- for (;;) {
- /* Make sure that we always have enough lookahead, except
- * at the end of the input file. We need MAX_MATCH bytes
- * for the next match, plus MIN_MATCH bytes to insert the
- * string following the next match.
- */
- if (s.lookahead < MIN_LOOKAHEAD) {
- fill_window(s);
- if (s.lookahead < MIN_LOOKAHEAD && flush === Z_NO_FLUSH) {
- return BS_NEED_MORE;
- }
- if (s.lookahead === 0) { break; } /* flush the current block */
- }
-
- /* Insert the string window[strstart .. strstart+2] in the
- * dictionary, and set hash_head to the head of the hash chain:
- */
- hash_head = 0/*NIL*/;
- if (s.lookahead >= MIN_MATCH) {
- /*** INSERT_STRING(s, s.strstart, hash_head); ***/
- s.ins_h = HASH(s, s.ins_h, s.window[s.strstart + MIN_MATCH - 1]);
- hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h];
- s.head[s.ins_h] = s.strstart;
- /***/
- }
-
- /* Find the longest match, discarding those <= prev_length.
- */
- s.prev_length = s.match_length;
- s.prev_match = s.match_start;
- s.match_length = MIN_MATCH - 1;
-
- if (hash_head !== 0/*NIL*/ && s.prev_length < s.max_lazy_match &&
- s.strstart - hash_head <= (s.w_size - MIN_LOOKAHEAD)/*MAX_DIST(s)*/) {
- /* To simplify the code, we prevent matches with the string
- * of window index 0 (in particular we have to avoid a match
- * of the string with itself at the start of the input file).
- */
- s.match_length = longest_match(s, hash_head);
- /* longest_match() sets match_start */
-
- if (s.match_length <= 5 &&
- (s.strategy === Z_FILTERED || (s.match_length === MIN_MATCH && s.strstart - s.match_start > 4096/*TOO_FAR*/))) {
-
- /* If prev_match is also MIN_MATCH, match_start is garbage
- * but we will ignore the current match anyway.
- */
- s.match_length = MIN_MATCH - 1;
- }
- }
- /* If there was a match at the previous step and the current
- * match is not better, output the previous match:
- */
- if (s.prev_length >= MIN_MATCH && s.match_length <= s.prev_length) {
- max_insert = s.strstart + s.lookahead - MIN_MATCH;
- /* Do not insert strings in hash table beyond this. */
-
- //check_match(s, s.strstart-1, s.prev_match, s.prev_length);
-
- /***_tr_tally_dist(s, s.strstart - 1 - s.prev_match,
- s.prev_length - MIN_MATCH, bflush);***/
- bflush = _tr_tally(s, s.strstart - 1 - s.prev_match, s.prev_length - MIN_MATCH);
- /* Insert in hash table all strings up to the end of the match.
- * strstart-1 and strstart are already inserted. If there is not
- * enough lookahead, the last two strings are not inserted in
- * the hash table.
- */
- s.lookahead -= s.prev_length - 1;
- s.prev_length -= 2;
- do {
- if (++s.strstart <= max_insert) {
- /*** INSERT_STRING(s, s.strstart, hash_head); ***/
- s.ins_h = HASH(s, s.ins_h, s.window[s.strstart + MIN_MATCH - 1]);
- hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h];
- s.head[s.ins_h] = s.strstart;
- /***/
- }
- } while (--s.prev_length !== 0);
- s.match_available = 0;
- s.match_length = MIN_MATCH - 1;
- s.strstart++;
-
- if (bflush) {
- /*** FLUSH_BLOCK(s, 0); ***/
- flush_block_only(s, false);
- if (s.strm.avail_out === 0) {
- return BS_NEED_MORE;
- }
- /***/
- }
-
- } else if (s.match_available) {
- /* If there was no match at the previous position, output a
- * single literal. If there was a match but the current match
- * is longer, truncate the previous match to a single literal.
- */
- //Tracevv((stderr,"%c", s->window[s->strstart-1]));
- /*** _tr_tally_lit(s, s.window[s.strstart-1], bflush); ***/
- bflush = _tr_tally(s, 0, s.window[s.strstart - 1]);
-
- if (bflush) {
- /*** FLUSH_BLOCK_ONLY(s, 0) ***/
- flush_block_only(s, false);
- /***/
- }
- s.strstart++;
- s.lookahead--;
- if (s.strm.avail_out === 0) {
- return BS_NEED_MORE;
- }
- } else {
- /* There is no previous match to compare with, wait for
- * the next step to decide.
- */
- s.match_available = 1;
- s.strstart++;
- s.lookahead--;
- }
- }
- //Assert (flush != Z_NO_FLUSH, "no flush?");
- if (s.match_available) {
- //Tracevv((stderr,"%c", s->window[s->strstart-1]));
- /*** _tr_tally_lit(s, s.window[s.strstart-1], bflush); ***/
- bflush = _tr_tally(s, 0, s.window[s.strstart - 1]);
-
- s.match_available = 0;
- }
- s.insert = s.strstart < MIN_MATCH - 1 ? s.strstart : MIN_MATCH - 1;
- if (flush === Z_FINISH) {
- /*** FLUSH_BLOCK(s, 1); ***/
- flush_block_only(s, true);
- if (s.strm.avail_out === 0) {
- return BS_FINISH_STARTED;
- }
- /***/
- return BS_FINISH_DONE;
- }
- if (s.sym_next) {
- /*** FLUSH_BLOCK(s, 0); ***/
- flush_block_only(s, false);
- if (s.strm.avail_out === 0) {
- return BS_NEED_MORE;
- }
- /***/
- }
-
- return BS_BLOCK_DONE;
-};
-
-
-/* ===========================================================================
- * For Z_RLE, simply look for runs of bytes, generate matches only of distance
- * one. Do not maintain a hash table. (It will be regenerated if this run of
- * deflate switches away from Z_RLE.)
- */
-const deflate_rle = (s, flush) => {
-
- let bflush; /* set if current block must be flushed */
- let prev; /* byte at distance one to match */
- let scan, strend; /* scan goes up to strend for length of run */
-
- const _win = s.window;
-
- for (;;) {
- /* Make sure that we always have enough lookahead, except
- * at the end of the input file. We need MAX_MATCH bytes
- * for the longest run, plus one for the unrolled loop.
- */
- if (s.lookahead <= MAX_MATCH) {
- fill_window(s);
- if (s.lookahead <= MAX_MATCH && flush === Z_NO_FLUSH) {
- return BS_NEED_MORE;
- }
- if (s.lookahead === 0) { break; } /* flush the current block */
- }
-
- /* See how many times the previous byte repeats */
- s.match_length = 0;
- if (s.lookahead >= MIN_MATCH && s.strstart > 0) {
- scan = s.strstart - 1;
- prev = _win[scan];
- if (prev === _win[++scan] && prev === _win[++scan] && prev === _win[++scan]) {
- strend = s.strstart + MAX_MATCH;
- do {
- /*jshint noempty:false*/
- } while (prev === _win[++scan] && prev === _win[++scan] &&
- prev === _win[++scan] && prev === _win[++scan] &&
- prev === _win[++scan] && prev === _win[++scan] &&
- prev === _win[++scan] && prev === _win[++scan] &&
- scan < strend);
- s.match_length = MAX_MATCH - (strend - scan);
- if (s.match_length > s.lookahead) {
- s.match_length = s.lookahead;
- }
- }
- //Assert(scan <= s->window+(uInt)(s->window_size-1), "wild scan");
- }
-
- /* Emit match if have run of MIN_MATCH or longer, else emit literal */
- if (s.match_length >= MIN_MATCH) {
- //check_match(s, s.strstart, s.strstart - 1, s.match_length);
-
- /*** _tr_tally_dist(s, 1, s.match_length - MIN_MATCH, bflush); ***/
- bflush = _tr_tally(s, 1, s.match_length - MIN_MATCH);
-
- s.lookahead -= s.match_length;
- s.strstart += s.match_length;
- s.match_length = 0;
- } else {
- /* No match, output a literal byte */
- //Tracevv((stderr,"%c", s->window[s->strstart]));
- /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/
- bflush = _tr_tally(s, 0, s.window[s.strstart]);
-
- s.lookahead--;
- s.strstart++;
- }
- if (bflush) {
- /*** FLUSH_BLOCK(s, 0); ***/
- flush_block_only(s, false);
- if (s.strm.avail_out === 0) {
- return BS_NEED_MORE;
- }
- /***/
- }
- }
- s.insert = 0;
- if (flush === Z_FINISH) {
- /*** FLUSH_BLOCK(s, 1); ***/
- flush_block_only(s, true);
- if (s.strm.avail_out === 0) {
- return BS_FINISH_STARTED;
- }
- /***/
- return BS_FINISH_DONE;
- }
- if (s.sym_next) {
- /*** FLUSH_BLOCK(s, 0); ***/
- flush_block_only(s, false);
- if (s.strm.avail_out === 0) {
- return BS_NEED_MORE;
- }
- /***/
- }
- return BS_BLOCK_DONE;
-};
-
-/* ===========================================================================
- * For Z_HUFFMAN_ONLY, do not look for matches. Do not maintain a hash table.
- * (It will be regenerated if this run of deflate switches away from Huffman.)
- */
-const deflate_huff = (s, flush) => {
-
- let bflush; /* set if current block must be flushed */
-
- for (;;) {
- /* Make sure that we have a literal to write. */
- if (s.lookahead === 0) {
- fill_window(s);
- if (s.lookahead === 0) {
- if (flush === Z_NO_FLUSH) {
- return BS_NEED_MORE;
- }
- break; /* flush the current block */
- }
- }
-
- /* Output a literal byte */
- s.match_length = 0;
- //Tracevv((stderr,"%c", s->window[s->strstart]));
- /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/
- bflush = _tr_tally(s, 0, s.window[s.strstart]);
- s.lookahead--;
- s.strstart++;
- if (bflush) {
- /*** FLUSH_BLOCK(s, 0); ***/
- flush_block_only(s, false);
- if (s.strm.avail_out === 0) {
- return BS_NEED_MORE;
- }
- /***/
- }
- }
- s.insert = 0;
- if (flush === Z_FINISH) {
- /*** FLUSH_BLOCK(s, 1); ***/
- flush_block_only(s, true);
- if (s.strm.avail_out === 0) {
- return BS_FINISH_STARTED;
- }
- /***/
- return BS_FINISH_DONE;
- }
- if (s.sym_next) {
- /*** FLUSH_BLOCK(s, 0); ***/
- flush_block_only(s, false);
- if (s.strm.avail_out === 0) {
- return BS_NEED_MORE;
- }
- /***/
- }
- return BS_BLOCK_DONE;
-};
-
-/* Values for max_lazy_match, good_match and max_chain_length, depending on
- * the desired pack level (0..9). The values given below have been tuned to
- * exclude worst case performance for pathological files. Better values may be
- * found for specific files.
- */
-function Config(good_length, max_lazy, nice_length, max_chain, func) {
-
- this.good_length = good_length;
- this.max_lazy = max_lazy;
- this.nice_length = nice_length;
- this.max_chain = max_chain;
- this.func = func;
-}
-
-const configuration_table = [
- /* good lazy nice chain */
- new Config(0, 0, 0, 0, deflate_stored), /* 0 store only */
- new Config(4, 4, 8, 4, deflate_fast), /* 1 max speed, no lazy matches */
- new Config(4, 5, 16, 8, deflate_fast), /* 2 */
- new Config(4, 6, 32, 32, deflate_fast), /* 3 */
-
- new Config(4, 4, 16, 16, deflate_slow), /* 4 lazy matches */
- new Config(8, 16, 32, 32, deflate_slow), /* 5 */
- new Config(8, 16, 128, 128, deflate_slow), /* 6 */
- new Config(8, 32, 128, 256, deflate_slow), /* 7 */
- new Config(32, 128, 258, 1024, deflate_slow), /* 8 */
- new Config(32, 258, 258, 4096, deflate_slow) /* 9 max compression */
-];
-
-
-/* ===========================================================================
- * Initialize the "longest match" routines for a new zlib stream
- */
-const lm_init = (s) => {
-
- s.window_size = 2 * s.w_size;
-
- /*** CLEAR_HASH(s); ***/
- zero(s.head); // Fill with NIL (= 0);
-
- /* Set the default configuration parameters:
- */
- s.max_lazy_match = configuration_table[s.level].max_lazy;
- s.good_match = configuration_table[s.level].good_length;
- s.nice_match = configuration_table[s.level].nice_length;
- s.max_chain_length = configuration_table[s.level].max_chain;
-
- s.strstart = 0;
- s.block_start = 0;
- s.lookahead = 0;
- s.insert = 0;
- s.match_length = s.prev_length = MIN_MATCH - 1;
- s.match_available = 0;
- s.ins_h = 0;
-};
-
-
-function DeflateState() {
- this.strm = null; /* pointer back to this zlib stream */
- this.status = 0; /* as the name implies */
- this.pending_buf = null; /* output still pending */
- this.pending_buf_size = 0; /* size of pending_buf */
- this.pending_out = 0; /* next pending byte to output to the stream */
- this.pending = 0; /* nb of bytes in the pending buffer */
- this.wrap = 0; /* bit 0 true for zlib, bit 1 true for gzip */
- this.gzhead = null; /* gzip header information to write */
- this.gzindex = 0; /* where in extra, name, or comment */
- this.method = Z_DEFLATED; /* can only be DEFLATED */
- this.last_flush = -1; /* value of flush param for previous deflate call */
-
- this.w_size = 0; /* LZ77 window size (32K by default) */
- this.w_bits = 0; /* log2(w_size) (8..16) */
- this.w_mask = 0; /* w_size - 1 */
-
- this.window = null;
- /* Sliding window. Input bytes are read into the second half of the window,
- * and move to the first half later to keep a dictionary of at least wSize
- * bytes. With this organization, matches are limited to a distance of
- * wSize-MAX_MATCH bytes, but this ensures that IO is always
- * performed with a length multiple of the block size.
- */
-
- this.window_size = 0;
- /* Actual size of window: 2*wSize, except when the user input buffer
- * is directly used as sliding window.
- */
-
- this.prev = null;
- /* Link to older string with same hash index. To limit the size of this
- * array to 64K, this link is maintained only for the last 32K strings.
- * An index in this array is thus a window index modulo 32K.
- */
-
- this.head = null; /* Heads of the hash chains or NIL. */
-
- this.ins_h = 0; /* hash index of string to be inserted */
- this.hash_size = 0; /* number of elements in hash table */
- this.hash_bits = 0; /* log2(hash_size) */
- this.hash_mask = 0; /* hash_size-1 */
-
- this.hash_shift = 0;
- /* Number of bits by which ins_h must be shifted at each input
- * step. It must be such that after MIN_MATCH steps, the oldest
- * byte no longer takes part in the hash key, that is:
- * hash_shift * MIN_MATCH >= hash_bits
- */
-
- this.block_start = 0;
- /* Window position at the beginning of the current output block. Gets
- * negative when the window is moved backwards.
- */
-
- this.match_length = 0; /* length of best match */
- this.prev_match = 0; /* previous match */
- this.match_available = 0; /* set if previous match exists */
- this.strstart = 0; /* start of string to insert */
- this.match_start = 0; /* start of matching string */
- this.lookahead = 0; /* number of valid bytes ahead in window */
-
- this.prev_length = 0;
- /* Length of the best match at previous step. Matches not greater than this
- * are discarded. This is used in the lazy match evaluation.
- */
-
- this.max_chain_length = 0;
- /* To speed up deflation, hash chains are never searched beyond this
- * length. A higher limit improves compression ratio but degrades the
- * speed.
- */
-
- this.max_lazy_match = 0;
- /* Attempt to find a better match only when the current match is strictly
- * smaller than this value. This mechanism is used only for compression
- * levels >= 4.
- */
- // That's an alias for max_lazy_match, don't use it directly
- //this.max_insert_length = 0;
- /* Insert new strings in the hash table only if the match length is not
- * greater than this length. This saves time but degrades compression.
- * max_insert_length is used only for compression levels <= 3.
- */
-
- this.level = 0; /* compression level (1..9) */
- this.strategy = 0; /* favor or force Huffman coding*/
-
- this.good_match = 0;
- /* Use a faster search when the previous match is longer than this */
-
- this.nice_match = 0; /* Stop searching when current match exceeds this */
-
- /* used by trees.c: */
-
- /* Didn't use ct_data typedef below to suppress compiler warning */
-
- // struct ct_data_s dyn_ltree[HEAP_SIZE]; /* literal and length tree */
- // struct ct_data_s dyn_dtree[2*D_CODES+1]; /* distance tree */
- // struct ct_data_s bl_tree[2*BL_CODES+1]; /* Huffman tree for bit lengths */
-
- // Use a flat array of DOUBLE size, with interleaved data,
- // because JS does not support structs efficiently
- this.dyn_ltree = new Uint16Array(HEAP_SIZE * 2);
- this.dyn_dtree = new Uint16Array((2 * D_CODES + 1) * 2);
- this.bl_tree = new Uint16Array((2 * BL_CODES + 1) * 2);
- zero(this.dyn_ltree);
- zero(this.dyn_dtree);
- zero(this.bl_tree);
-
- this.l_desc = null; /* desc. for literal tree */
- this.d_desc = null; /* desc. for distance tree */
- this.bl_desc = null; /* desc. for bit length tree */
-
- //ush bl_count[MAX_BITS+1];
- this.bl_count = new Uint16Array(MAX_BITS + 1);
- /* number of codes at each bit length for an optimal tree */
-
- //int heap[2*L_CODES+1]; /* heap used to build the Huffman trees */
- this.heap = new Uint16Array(2 * L_CODES + 1); /* heap used to build the Huffman trees */
- zero(this.heap);
-
- this.heap_len = 0; /* number of elements in the heap */
- this.heap_max = 0; /* element of largest frequency */
- /* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
- * The same heap array is used to build all trees.
- */
-
- this.depth = new Uint16Array(2 * L_CODES + 1); //uch depth[2*L_CODES+1];
- zero(this.depth);
- /* Depth of each subtree used as tie breaker for trees of equal frequency
- */
-
- this.sym_buf = 0; /* buffer for distances and literals/lengths */
-
- this.lit_bufsize = 0;
- /* Size of match buffer for literals/lengths. There are 4 reasons for
- * limiting lit_bufsize to 64K:
- * - frequencies can be kept in 16 bit counters
- * - if compression is not successful for the first block, all input
- * data is still in the window so we can still emit a stored block even
- * when input comes from standard input. (This can also be done for
- * all blocks if lit_bufsize is not greater than 32K.)
- * - if compression is not successful for a file smaller than 64K, we can
- * even emit a stored file instead of a stored block (saving 5 bytes).
- * This is applicable only for zip (not gzip or zlib).
- * - creating new Huffman trees less frequently may not provide fast
- * adaptation to changes in the input data statistics. (Take for
- * example a binary file with poorly compressible code followed by
- * a highly compressible string table.) Smaller buffer sizes give
- * fast adaptation but have of course the overhead of transmitting
- * trees more frequently.
- * - I can't count above 4
- */
-
- this.sym_next = 0; /* running index in sym_buf */
- this.sym_end = 0; /* symbol table full when sym_next reaches this */
-
- this.opt_len = 0; /* bit length of current block with optimal trees */
- this.static_len = 0; /* bit length of current block with static trees */
- this.matches = 0; /* number of string matches in current block */
- this.insert = 0; /* bytes at end of window left to insert */
-
-
- this.bi_buf = 0;
- /* Output buffer. bits are inserted starting at the bottom (least
- * significant bits).
- */
- this.bi_valid = 0;
- /* Number of valid bits in bi_buf. All bits above the last valid bit
- * are always zero.
- */
-
- // Used for window memory init. We safely ignore it for JS. That makes
- // sense only for pointers and memory check tools.
- //this.high_water = 0;
- /* High water mark offset in window for initialized bytes -- bytes above
- * this are set to zero in order to avoid memory check warnings when
- * longest match routines access bytes past the input. This is then
- * updated to the new high water mark.
- */
-}
-
-
-/* =========================================================================
- * Check for a valid deflate stream state. Return 0 if ok, 1 if not.
- */
-const deflateStateCheck = (strm) => {
-
- if (!strm) {
- return 1;
- }
- const s = strm.state;
- if (!s || s.strm !== strm || (s.status !== INIT_STATE &&
-//#ifdef GZIP
- s.status !== GZIP_STATE &&
-//#endif
- s.status !== EXTRA_STATE &&
- s.status !== NAME_STATE &&
- s.status !== COMMENT_STATE &&
- s.status !== HCRC_STATE &&
- s.status !== BUSY_STATE &&
- s.status !== FINISH_STATE)) {
- return 1;
- }
- return 0;
-};
-
-
-const deflateResetKeep = (strm) => {
-
- if (deflateStateCheck(strm)) {
- return err(strm, Z_STREAM_ERROR);
- }
-
- strm.total_in = strm.total_out = 0;
- strm.data_type = Z_UNKNOWN;
-
- const s = strm.state;
- s.pending = 0;
- s.pending_out = 0;
-
- if (s.wrap < 0) {
- s.wrap = -s.wrap;
- /* was made negative by deflate(..., Z_FINISH); */
- }
- s.status =
-//#ifdef GZIP
- s.wrap === 2 ? GZIP_STATE :
-//#endif
- s.wrap ? INIT_STATE : BUSY_STATE;
- strm.adler = (s.wrap === 2) ?
- 0 // crc32(0, Z_NULL, 0)
- :
- 1; // adler32(0, Z_NULL, 0)
- s.last_flush = -2;
- _tr_init(s);
- return Z_OK;
-};
-
-
-const deflateReset = (strm) => {
-
- const ret = deflateResetKeep(strm);
- if (ret === Z_OK) {
- lm_init(strm.state);
- }
- return ret;
-};
-
-
-const deflateSetHeader = (strm, head) => {
-
- if (deflateStateCheck(strm) || strm.state.wrap !== 2) {
- return Z_STREAM_ERROR;
- }
- strm.state.gzhead = head;
- return Z_OK;
-};
-
-
-const deflateInit2 = (strm, level, method, windowBits, memLevel, strategy) => {
-
- if (!strm) { // === Z_NULL
- return Z_STREAM_ERROR;
- }
- let wrap = 1;
-
- if (level === Z_DEFAULT_COMPRESSION) {
- level = 6;
- }
-
- if (windowBits < 0) { /* suppress zlib wrapper */
- wrap = 0;
- windowBits = -windowBits;
- }
-
- else if (windowBits > 15) {
- wrap = 2; /* write gzip wrapper instead */
- windowBits -= 16;
- }
-
-
- if (memLevel < 1 || memLevel > MAX_MEM_LEVEL || method !== Z_DEFLATED ||
- windowBits < 8 || windowBits > 15 || level < 0 || level > 9 ||
- strategy < 0 || strategy > Z_FIXED || (windowBits === 8 && wrap !== 1)) {
- return err(strm, Z_STREAM_ERROR);
- }
-
-
- if (windowBits === 8) {
- windowBits = 9;
- }
- /* until 256-byte window bug fixed */
-
- const s = new DeflateState();
-
- strm.state = s;
- s.strm = strm;
- s.status = INIT_STATE; /* to pass state test in deflateReset() */
-
- s.wrap = wrap;
- s.gzhead = null;
- s.w_bits = windowBits;
- s.w_size = 1 << s.w_bits;
- s.w_mask = s.w_size - 1;
-
- s.hash_bits = memLevel + 7;
- s.hash_size = 1 << s.hash_bits;
- s.hash_mask = s.hash_size - 1;
- s.hash_shift = ~~((s.hash_bits + MIN_MATCH - 1) / MIN_MATCH);
-
- s.window = new Uint8Array(s.w_size * 2);
- s.head = new Uint16Array(s.hash_size);
- s.prev = new Uint16Array(s.w_size);
-
- // Don't need mem init magic for JS.
- //s.high_water = 0; /* nothing written to s->window yet */
-
- s.lit_bufsize = 1 << (memLevel + 6); /* 16K elements by default */
-
- /* We overlay pending_buf and sym_buf. This works since the average size
- * for length/distance pairs over any compressed block is assured to be 31
- * bits or less.
- *
- * Analysis: The longest fixed codes are a length code of 8 bits plus 5
- * extra bits, for lengths 131 to 257. The longest fixed distance codes are
- * 5 bits plus 13 extra bits, for distances 16385 to 32768. The longest
- * possible fixed-codes length/distance pair is then 31 bits total.
- *
- * sym_buf starts one-fourth of the way into pending_buf. So there are
- * three bytes in sym_buf for every four bytes in pending_buf. Each symbol
- * in sym_buf is three bytes -- two for the distance and one for the
- * literal/length. As each symbol is consumed, the pointer to the next
- * sym_buf value to read moves forward three bytes. From that symbol, up to
- * 31 bits are written to pending_buf. The closest the written pending_buf
- * bits gets to the next sym_buf symbol to read is just before the last
- * code is written. At that time, 31*(n-2) bits have been written, just
- * after 24*(n-2) bits have been consumed from sym_buf. sym_buf starts at
- * 8*n bits into pending_buf. (Note that the symbol buffer fills when n-1
- * symbols are written.) The closest the writing gets to what is unread is
- * then n+14 bits. Here n is lit_bufsize, which is 16384 by default, and
- * can range from 128 to 32768.
- *
- * Therefore, at a minimum, there are 142 bits of space between what is
- * written and what is read in the overlain buffers, so the symbols cannot
- * be overwritten by the compressed data. That space is actually 139 bits,
- * due to the three-bit fixed-code block header.
- *
- * That covers the case where either Z_FIXED is specified, forcing fixed
- * codes, or when the use of fixed codes is chosen, because that choice
- * results in a smaller compressed block than dynamic codes. That latter
- * condition then assures that the above analysis also covers all dynamic
- * blocks. A dynamic-code block will only be chosen to be emitted if it has
- * fewer bits than a fixed-code block would for the same set of symbols.
- * Therefore its average symbol length is assured to be less than 31. So
- * the compressed data for a dynamic block also cannot overwrite the
- * symbols from which it is being constructed.
- */
-
- s.pending_buf_size = s.lit_bufsize * 4;
- s.pending_buf = new Uint8Array(s.pending_buf_size);
-
- // It is offset from `s.pending_buf` (size is `s.lit_bufsize * 2`)
- //s->sym_buf = s->pending_buf + s->lit_bufsize;
- s.sym_buf = s.lit_bufsize;
-
- //s->sym_end = (s->lit_bufsize - 1) * 3;
- s.sym_end = (s.lit_bufsize - 1) * 3;
- /* We avoid equality with lit_bufsize*3 because of wraparound at 64K
- * on 16 bit machines and because stored blocks are restricted to
- * 64K-1 bytes.
- */
-
- s.level = level;
- s.strategy = strategy;
- s.method = method;
-
- return deflateReset(strm);
-};
-
-const deflateInit = (strm, level) => {
-
- return deflateInit2(strm, level, Z_DEFLATED, MAX_WBITS, DEF_MEM_LEVEL, Z_DEFAULT_STRATEGY);
-};
-
-
-/* ========================================================================= */
-const deflate = (strm, flush) => {
-
- if (deflateStateCheck(strm) || flush > Z_BLOCK || flush < 0) {
- return strm ? err(strm, Z_STREAM_ERROR) : Z_STREAM_ERROR;
- }
-
- const s = strm.state;
-
- if (!strm.output ||
- (strm.avail_in !== 0 && !strm.input) ||
- (s.status === FINISH_STATE && flush !== Z_FINISH)) {
- return err(strm, (strm.avail_out === 0) ? Z_BUF_ERROR : Z_STREAM_ERROR);
- }
-
- const old_flush = s.last_flush;
- s.last_flush = flush;
-
- /* Flush as much pending output as possible */
- if (s.pending !== 0) {
- flush_pending(strm);
- if (strm.avail_out === 0) {
- /* Since avail_out is 0, deflate will be called again with
- * more output space, but possibly with both pending and
- * avail_in equal to zero. There won't be anything to do,
- * but this is not an error situation so make sure we
- * return OK instead of BUF_ERROR at next call of deflate:
- */
- s.last_flush = -1;
- return Z_OK;
- }
-
- /* Make sure there is something to do and avoid duplicate consecutive
- * flushes. For repeated and useless calls with Z_FINISH, we keep
- * returning Z_STREAM_END instead of Z_BUF_ERROR.
- */
- } else if (strm.avail_in === 0 && rank(flush) <= rank(old_flush) &&
- flush !== Z_FINISH) {
- return err(strm, Z_BUF_ERROR);
- }
-
- /* User must not provide more input after the first FINISH: */
- if (s.status === FINISH_STATE && strm.avail_in !== 0) {
- return err(strm, Z_BUF_ERROR);
- }
-
- /* Write the header */
- if (s.status === INIT_STATE && s.wrap === 0) {
- s.status = BUSY_STATE;
- }
- if (s.status === INIT_STATE) {
- /* zlib header */
- let header = (Z_DEFLATED + ((s.w_bits - 8) << 4)) << 8;
- let level_flags = -1;
-
- if (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2) {
- level_flags = 0;
- } else if (s.level < 6) {
- level_flags = 1;
- } else if (s.level === 6) {
- level_flags = 2;
- } else {
- level_flags = 3;
- }
- header |= (level_flags << 6);
- if (s.strstart !== 0) { header |= PRESET_DICT; }
- header += 31 - (header % 31);
-
- putShortMSB(s, header);
-
- /* Save the adler32 of the preset dictionary: */
- if (s.strstart !== 0) {
- putShortMSB(s, strm.adler >>> 16);
- putShortMSB(s, strm.adler & 0xffff);
- }
- strm.adler = 1; // adler32(0L, Z_NULL, 0);
- s.status = BUSY_STATE;
-
- /* Compression must start with an empty pending buffer */
- flush_pending(strm);
- if (s.pending !== 0) {
- s.last_flush = -1;
- return Z_OK;
- }
- }
-//#ifdef GZIP
- if (s.status === GZIP_STATE) {
- /* gzip header */
- strm.adler = 0; //crc32(0L, Z_NULL, 0);
- put_byte(s, 31);
- put_byte(s, 139);
- put_byte(s, 8);
- if (!s.gzhead) { // s->gzhead == Z_NULL
- put_byte(s, 0);
- put_byte(s, 0);
- put_byte(s, 0);
- put_byte(s, 0);
- put_byte(s, 0);
- put_byte(s, s.level === 9 ? 2 :
- (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2 ?
- 4 : 0));
- put_byte(s, OS_CODE);
- s.status = BUSY_STATE;
-
- /* Compression must start with an empty pending buffer */
- flush_pending(strm);
- if (s.pending !== 0) {
- s.last_flush = -1;
- return Z_OK;
- }
- }
- else {
- put_byte(s, (s.gzhead.text ? 1 : 0) +
- (s.gzhead.hcrc ? 2 : 0) +
- (!s.gzhead.extra ? 0 : 4) +
- (!s.gzhead.name ? 0 : 8) +
- (!s.gzhead.comment ? 0 : 16)
- );
- put_byte(s, s.gzhead.time & 0xff);
- put_byte(s, (s.gzhead.time >> 8) & 0xff);
- put_byte(s, (s.gzhead.time >> 16) & 0xff);
- put_byte(s, (s.gzhead.time >> 24) & 0xff);
- put_byte(s, s.level === 9 ? 2 :
- (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2 ?
- 4 : 0));
- put_byte(s, s.gzhead.os & 0xff);
- if (s.gzhead.extra && s.gzhead.extra.length) {
- put_byte(s, s.gzhead.extra.length & 0xff);
- put_byte(s, (s.gzhead.extra.length >> 8) & 0xff);
- }
- if (s.gzhead.hcrc) {
- strm.adler = crc32(strm.adler, s.pending_buf, s.pending, 0);
- }
- s.gzindex = 0;
- s.status = EXTRA_STATE;
- }
- }
- if (s.status === EXTRA_STATE) {
- if (s.gzhead.extra/* != Z_NULL*/) {
- let beg = s.pending; /* start of bytes to update crc */
- let left = (s.gzhead.extra.length & 0xffff) - s.gzindex;
- while (s.pending + left > s.pending_buf_size) {
- let copy = s.pending_buf_size - s.pending;
- // zmemcpy(s.pending_buf + s.pending,
- // s.gzhead.extra + s.gzindex, copy);
- s.pending_buf.set(s.gzhead.extra.subarray(s.gzindex, s.gzindex + copy), s.pending);
- s.pending = s.pending_buf_size;
- //--- HCRC_UPDATE(beg) ---//
- if (s.gzhead.hcrc && s.pending > beg) {
- strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg);
- }
- //---//
- s.gzindex += copy;
- flush_pending(strm);
- if (s.pending !== 0) {
- s.last_flush = -1;
- return Z_OK;
- }
- beg = 0;
- left -= copy;
- }
- // JS specific: s.gzhead.extra may be TypedArray or Array for backward compatibility
- // TypedArray.slice and TypedArray.from don't exist in IE10-IE11
- let gzhead_extra = new Uint8Array(s.gzhead.extra);
- // zmemcpy(s->pending_buf + s->pending,
- // s->gzhead->extra + s->gzindex, left);
- s.pending_buf.set(gzhead_extra.subarray(s.gzindex, s.gzindex + left), s.pending);
- s.pending += left;
- //--- HCRC_UPDATE(beg) ---//
- if (s.gzhead.hcrc && s.pending > beg) {
- strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg);
- }
- //---//
- s.gzindex = 0;
- }
- s.status = NAME_STATE;
- }
- if (s.status === NAME_STATE) {
- if (s.gzhead.name/* != Z_NULL*/) {
- let beg = s.pending; /* start of bytes to update crc */
- let val;
- do {
- if (s.pending === s.pending_buf_size) {
- //--- HCRC_UPDATE(beg) ---//
- if (s.gzhead.hcrc && s.pending > beg) {
- strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg);
- }
- //---//
- flush_pending(strm);
- if (s.pending !== 0) {
- s.last_flush = -1;
- return Z_OK;
- }
- beg = 0;
- }
- // JS specific: little magic to add zero terminator to end of string
- if (s.gzindex < s.gzhead.name.length) {
- val = s.gzhead.name.charCodeAt(s.gzindex++) & 0xff;
- } else {
- val = 0;
- }
- put_byte(s, val);
- } while (val !== 0);
- //--- HCRC_UPDATE(beg) ---//
- if (s.gzhead.hcrc && s.pending > beg) {
- strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg);
- }
- //---//
- s.gzindex = 0;
- }
- s.status = COMMENT_STATE;
- }
- if (s.status === COMMENT_STATE) {
- if (s.gzhead.comment/* != Z_NULL*/) {
- let beg = s.pending; /* start of bytes to update crc */
- let val;
- do {
- if (s.pending === s.pending_buf_size) {
- //--- HCRC_UPDATE(beg) ---//
- if (s.gzhead.hcrc && s.pending > beg) {
- strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg);
- }
- //---//
- flush_pending(strm);
- if (s.pending !== 0) {
- s.last_flush = -1;
- return Z_OK;
- }
- beg = 0;
- }
- // JS specific: little magic to add zero terminator to end of string
- if (s.gzindex < s.gzhead.comment.length) {
- val = s.gzhead.comment.charCodeAt(s.gzindex++) & 0xff;
- } else {
- val = 0;
- }
- put_byte(s, val);
- } while (val !== 0);
- //--- HCRC_UPDATE(beg) ---//
- if (s.gzhead.hcrc && s.pending > beg) {
- strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg);
- }
- //---//
- }
- s.status = HCRC_STATE;
- }
- if (s.status === HCRC_STATE) {
- if (s.gzhead.hcrc) {
- if (s.pending + 2 > s.pending_buf_size) {
- flush_pending(strm);
- if (s.pending !== 0) {
- s.last_flush = -1;
- return Z_OK;
- }
- }
- put_byte(s, strm.adler & 0xff);
- put_byte(s, (strm.adler >> 8) & 0xff);
- strm.adler = 0; //crc32(0L, Z_NULL, 0);
- }
- s.status = BUSY_STATE;
-
- /* Compression must start with an empty pending buffer */
- flush_pending(strm);
- if (s.pending !== 0) {
- s.last_flush = -1;
- return Z_OK;
- }
- }
-//#endif
-
- /* Start a new block or continue the current one.
- */
- if (strm.avail_in !== 0 || s.lookahead !== 0 ||
- (flush !== Z_NO_FLUSH && s.status !== FINISH_STATE)) {
- let bstate = s.level === 0 ? deflate_stored(s, flush) :
- s.strategy === Z_HUFFMAN_ONLY ? deflate_huff(s, flush) :
- s.strategy === Z_RLE ? deflate_rle(s, flush) :
- configuration_table[s.level].func(s, flush);
-
- if (bstate === BS_FINISH_STARTED || bstate === BS_FINISH_DONE) {
- s.status = FINISH_STATE;
- }
- if (bstate === BS_NEED_MORE || bstate === BS_FINISH_STARTED) {
- if (strm.avail_out === 0) {
- s.last_flush = -1;
- /* avoid BUF_ERROR next call, see above */
- }
- return Z_OK;
- /* If flush != Z_NO_FLUSH && avail_out == 0, the next call
- * of deflate should use the same flush parameter to make sure
- * that the flush is complete. So we don't have to output an
- * empty block here, this will be done at next call. This also
- * ensures that for a very small output buffer, we emit at most
- * one empty block.
- */
- }
- if (bstate === BS_BLOCK_DONE) {
- if (flush === Z_PARTIAL_FLUSH) {
- _tr_align(s);
- }
- else if (flush !== Z_BLOCK) { /* FULL_FLUSH or SYNC_FLUSH */
-
- _tr_stored_block(s, 0, 0, false);
- /* For a full flush, this empty block will be recognized
- * as a special marker by inflate_sync().
- */
- if (flush === Z_FULL_FLUSH) {
- /*** CLEAR_HASH(s); ***/ /* forget history */
- zero(s.head); // Fill with NIL (= 0);
-
- if (s.lookahead === 0) {
- s.strstart = 0;
- s.block_start = 0;
- s.insert = 0;
- }
- }
- }
- flush_pending(strm);
- if (strm.avail_out === 0) {
- s.last_flush = -1; /* avoid BUF_ERROR at next call, see above */
- return Z_OK;
- }
- }
- }
-
- if (flush !== Z_FINISH) { return Z_OK; }
- if (s.wrap <= 0) { return Z_STREAM_END; }
-
- /* Write the trailer */
- if (s.wrap === 2) {
- put_byte(s, strm.adler & 0xff);
- put_byte(s, (strm.adler >> 8) & 0xff);
- put_byte(s, (strm.adler >> 16) & 0xff);
- put_byte(s, (strm.adler >> 24) & 0xff);
- put_byte(s, strm.total_in & 0xff);
- put_byte(s, (strm.total_in >> 8) & 0xff);
- put_byte(s, (strm.total_in >> 16) & 0xff);
- put_byte(s, (strm.total_in >> 24) & 0xff);
- }
- else
- {
- putShortMSB(s, strm.adler >>> 16);
- putShortMSB(s, strm.adler & 0xffff);
- }
-
- flush_pending(strm);
- /* If avail_out is zero, the application will call deflate again
- * to flush the rest.
- */
- if (s.wrap > 0) { s.wrap = -s.wrap; }
- /* write the trailer only once! */
- return s.pending !== 0 ? Z_OK : Z_STREAM_END;
-};
-
-
-const deflateEnd = (strm) => {
-
- if (deflateStateCheck(strm)) {
- return Z_STREAM_ERROR;
- }
-
- const status = strm.state.status;
-
- strm.state = null;
-
- return status === BUSY_STATE ? err(strm, Z_DATA_ERROR) : Z_OK;
-};
-
-
-/* =========================================================================
- * Initializes the compression dictionary from the given byte
- * sequence without producing any compressed output.
- */
-const deflateSetDictionary = (strm, dictionary) => {
-
- let dictLength = dictionary.length;
-
- if (deflateStateCheck(strm)) {
- return Z_STREAM_ERROR;
- }
-
- const s = strm.state;
- const wrap = s.wrap;
-
- if (wrap === 2 || (wrap === 1 && s.status !== INIT_STATE) || s.lookahead) {
- return Z_STREAM_ERROR;
- }
-
- /* when using zlib wrappers, compute Adler-32 for provided dictionary */
- if (wrap === 1) {
- /* adler32(strm->adler, dictionary, dictLength); */
- strm.adler = adler32(strm.adler, dictionary, dictLength, 0);
- }
-
- s.wrap = 0; /* avoid computing Adler-32 in read_buf */
-
- /* if dictionary would fill window, just replace the history */
- if (dictLength >= s.w_size) {
- if (wrap === 0) { /* already empty otherwise */
- /*** CLEAR_HASH(s); ***/
- zero(s.head); // Fill with NIL (= 0);
- s.strstart = 0;
- s.block_start = 0;
- s.insert = 0;
- }
- /* use the tail */
- // dictionary = dictionary.slice(dictLength - s.w_size);
- let tmpDict = new Uint8Array(s.w_size);
- tmpDict.set(dictionary.subarray(dictLength - s.w_size, dictLength), 0);
- dictionary = tmpDict;
- dictLength = s.w_size;
- }
- /* insert dictionary into window and hash */
- const avail = strm.avail_in;
- const next = strm.next_in;
- const input = strm.input;
- strm.avail_in = dictLength;
- strm.next_in = 0;
- strm.input = dictionary;
- fill_window(s);
- while (s.lookahead >= MIN_MATCH) {
- let str = s.strstart;
- let n = s.lookahead - (MIN_MATCH - 1);
- do {
- /* UPDATE_HASH(s, s->ins_h, s->window[str + MIN_MATCH-1]); */
- s.ins_h = HASH(s, s.ins_h, s.window[str + MIN_MATCH - 1]);
-
- s.prev[str & s.w_mask] = s.head[s.ins_h];
-
- s.head[s.ins_h] = str;
- str++;
- } while (--n);
- s.strstart = str;
- s.lookahead = MIN_MATCH - 1;
- fill_window(s);
- }
- s.strstart += s.lookahead;
- s.block_start = s.strstart;
- s.insert = s.lookahead;
- s.lookahead = 0;
- s.match_length = s.prev_length = MIN_MATCH - 1;
- s.match_available = 0;
- strm.next_in = next;
- strm.input = input;
- strm.avail_in = avail;
- s.wrap = wrap;
- return Z_OK;
-};
-
-var deflateInfo = 'pako deflate (from Nodeca project)'
-
-export {
- deflateInit,deflateInit2,deflateReset,deflateResetKeep,deflateSetHeader,deflate,deflateEnd,deflateSetDictionary,deflateInfo
-}
-
-
-/* Not implemented
-module.exports.deflateBound = deflateBound;
-module.exports.deflateCopy = deflateCopy;
-module.exports.deflateGetDictionary = deflateGetDictionary;
-module.exports.deflateParams = deflateParams;
-module.exports.deflatePending = deflatePending;
-module.exports.deflatePrime = deflatePrime;
-module.exports.deflateTune = deflateTune;
-*/
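
The deflate.js removed above derives the two-byte zlib wrapper (CMF/FLG) arithmetically: the window size and method are packed into CMF, a compression-level hint into FLG, and the pair is padded so that the 16-bit value is divisible by 31 (the FCHECK field). A minimal TypeScript sketch of that arithmetic — illustration only, not part of this diff or of the pako sources; the strategy check and the preset-dictionary bit are omitted:

```typescript
// Sketch of the zlib header math used by the removed deflate():
// CMF = method (8 = deflate) plus log2(window size) - 8 in the high nibble,
// FLG = compression-level hint, padded so (CMF*256 + FLG) % 31 === 0.
function zlibHeader(level: number, windowBits: number): [number, number] {
  const Z_DEFLATED = 8;
  let header = (Z_DEFLATED + ((windowBits - 8) << 4)) << 8;
  const levelFlags = level < 2 ? 0 : level < 6 ? 1 : level === 6 ? 2 : 3;
  header |= levelFlags << 6;
  header += 31 - (header % 31); // FCHECK: round up to a multiple of 31
  return [header >>> 8, header & 0xff];
}

const [cmf, flg] = zlibHeader(6, 15);
console.log(cmf.toString(16), flg.toString(16));   // "78 9c"
console.log((cmf * 256 + flg) % 31 === 0);         // true
```

For the default level 6 and a 32K window this yields the familiar `0x78 0x9C` prefix seen at the start of zlib-wrapped streams.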
diff --git a/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/gzheader.js b/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/gzheader.js
deleted file mode 100644
index 11a52e2..0000000
--- a/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/gzheader.js
+++ /dev/null
@@ -1,58 +0,0 @@
-'use strict';
-
-// (C) 1995-2013 Jean-loup Gailly and Mark Adler
-// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin
-//
-// This software is provided 'as-is', without any express or implied
-// warranty. In no event will the authors be held liable for any damages
-// arising from the use of this software.
-//
-// Permission is granted to anyone to use this software for any purpose,
-// including commercial applications, and to alter it and redistribute it
-// freely, subject to the following restrictions:
-//
-// 1. The origin of this software must not be misrepresented; you must not
-// claim that you wrote the original software. If you use this software
-// in a product, an acknowledgment in the product documentation would be
-// appreciated but is not required.
-// 2. Altered source versions must be plainly marked as such, and must not be
-// misrepresented as being the original software.
-// 3. This notice may not be removed or altered from any source distribution.
-
-function GZheader() {
- /* true if compressed data believed to be text */
- this.text = 0;
- /* modification time */
- this.time = 0;
- /* extra flags (not used when writing a gzip file) */
- this.xflags = 0;
- /* operating system */
- this.os = 0;
- /* pointer to extra field or Z_NULL if none */
- this.extra = null;
- /* extra field length (valid if extra != Z_NULL) */
- this.extra_len = 0; // Actually, we don't need it in JS,
- // but leave for few code modifications
-
- //
- // Setup limits is not necessary because in js we should not preallocate memory
- // for inflate use constant limit in 65536 bytes
- //
-
- /* space at extra (only when reading header) */
- // this.extra_max = 0;
- /* pointer to zero-terminated file name or Z_NULL */
- this.name = '';
- /* space at name (only when reading header) */
- // this.name_max = 0;
- /* pointer to zero-terminated comment or Z_NULL */
- this.comment = '';
- /* space at comment (only when reading header) */
- // this.comm_max = 0;
- /* true if there was or will be a header crc */
- this.hcrc = 0;
- /* true when done reading gzip header (not used when writing a gzip file) */
- this.done = false;
-}
-
-export {GZheader};
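
GZheader above only declares the header fields; the bytes themselves are emitted by the removed deflate.js. When no GZheader object is supplied, that code falls back to a fixed 10-byte gzip header (magic, method, empty flags, zero mtime, an XFL hint, OS code). A small TypeScript sketch of that fallback layout — illustrative only; the OS code value and the level-only XFL rule are simplifications of the removed code, which also consults the strategy:

```typescript
// Sketch: the fixed 10-byte gzip header written when no GZheader is provided.
function basicGzipHeader(level: number, osCode: number = 0x03 /* assumed: unix */): Uint8Array {
  const xfl = level === 9 ? 2 : level < 2 ? 4 : 0; // XFL: slowest / fastest / default
  return new Uint8Array([
    0x1f, 0x8b,   // ID1, ID2: gzip magic
    8,            // CM: deflate
    0,            // FLG: no text/hcrc/extra/name/comment
    0, 0, 0, 0,   // MTIME: unknown
    xfl,          // XFL: compression-level hint
    osCode,       // OS
  ]);
}
```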
diff --git a/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/inffast.js b/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/inffast.js
deleted file mode 100644
index ad001d8..0000000
--- a/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/inffast.js
+++ /dev/null
@@ -1,344 +0,0 @@
-'use strict';
-
-// (C) 1995-2013 Jean-loup Gailly and Mark Adler
-// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin
-//
-// This software is provided 'as-is', without any express or implied
-// warranty. In no event will the authors be held liable for any damages
-// arising from the use of this software.
-//
-// Permission is granted to anyone to use this software for any purpose,
-// including commercial applications, and to alter it and redistribute it
-// freely, subject to the following restrictions:
-//
-// 1. The origin of this software must not be misrepresented; you must not
-// claim that you wrote the original software. If you use this software
-// in a product, an acknowledgment in the product documentation would be
-// appreciated but is not required.
-// 2. Altered source versions must be plainly marked as such, and must not be
-// misrepresented as being the original software.
-// 3. This notice may not be removed or altered from any source distribution.
-
-// See state defs from inflate.js
-const BAD = 16209; /* got a data error -- remain here until reset */
-const TYPE = 16191; /* i: waiting for type bits, including last-flag bit */
-
-/*
- Decode literal, length, and distance codes and write out the resulting
- literal and match bytes until either not enough input or output is
- available, an end-of-block is encountered, or a data error is encountered.
- When large enough input and output buffers are supplied to inflate(), for
- example, a 16K input buffer and a 64K output buffer, more than 95% of the
- inflate execution time is spent in this routine.
-
- Entry assumptions:
-
- state.mode === LEN
- strm.avail_in >= 6
- strm.avail_out >= 258
- start >= strm.avail_out
- state.bits < 8
-
- On return, state.mode is one of:
-
- LEN -- ran out of enough output space or enough available input
- TYPE -- reached end of block code, inflate() to interpret next block
- BAD -- error in block data
-
- Notes:
-
- - The maximum input bits used by a length/distance pair is 15 bits for the
- length code, 5 bits for the length extra, 15 bits for the distance code,
- and 13 bits for the distance extra. This totals 48 bits, or six bytes.
- Therefore if strm.avail_in >= 6, then there is enough input to avoid
- checking for available input while decoding.
-
- - The maximum bytes that a single length/distance pair can output is 258
- bytes, which is the maximum length that can be coded. inflate_fast()
- requires strm.avail_out >= 258 for each loop to avoid checking for
- output space.
- */
-export function inflate_fast(strm, start) {
- let _in; /* local strm.input */
- let last; /* have enough input while in < last */
- let _out; /* local strm.output */
- let beg; /* inflate()'s initial strm.output */
- let end; /* while out < end, enough space available */
-//#ifdef INFLATE_STRICT
- let dmax; /* maximum distance from zlib header */
-//#endif
- let wsize; /* window size or zero if not using window */
- let whave; /* valid bytes in the window */
- let wnext; /* window write index */
- // Use `s_window` instead `window`, avoid conflict with instrumentation tools
- let s_window; /* allocated sliding window, if wsize != 0 */
- let hold; /* local strm.hold */
- let bits; /* local strm.bits */
- let lcode; /* local strm.lencode */
- let dcode; /* local strm.distcode */
- let lmask; /* mask for first level of length codes */
- let dmask; /* mask for first level of distance codes */
- let here; /* retrieved table entry */
- let op; /* code bits, operation, extra bits, or */
- /* window position, window bytes to copy */
- let len; /* match length, unused bytes */
- let dist; /* match distance */
- let from; /* where to copy match from */
- let from_source;
-
-
- let input, output; // JS specific, because we have no pointers
-
- /* copy state to local variables */
- const state = strm.state;
- //here = state.here;
- _in = strm.next_in;
- input = strm.input;
- last = _in + (strm.avail_in - 5);
- _out = strm.next_out;
- output = strm.output;
- beg = _out - (start - strm.avail_out);
- end = _out + (strm.avail_out - 257);
-//#ifdef INFLATE_STRICT
- dmax = state.dmax;
-//#endif
- wsize = state.wsize;
- whave = state.whave;
- wnext = state.wnext;
- s_window = state.window;
- hold = state.hold;
- bits = state.bits;
- lcode = state.lencode;
- dcode = state.distcode;
- lmask = (1 << state.lenbits) - 1;
- dmask = (1 << state.distbits) - 1;
-
-
- /* decode literals and length/distances until end-of-block or not enough
- input data or output space */
-
- top:
- do {
- if (bits < 15) {
- hold += input[_in++] << bits;
- bits += 8;
- hold += input[_in++] << bits;
- bits += 8;
- }
-
- here = lcode[hold & lmask];
-
- dolen:
- for (;;) { // Goto emulation
- op = here >>> 24/*here.bits*/;
- hold >>>= op;
- bits -= op;
- op = (here >>> 16) & 0xff/*here.op*/;
- if (op === 0) { /* literal */
- //Tracevv((stderr, here.val >= 0x20 && here.val < 0x7f ?
- // "inflate: literal '%c'\n" :
- // "inflate: literal 0x%02x\n", here.val));
- output[_out++] = here & 0xffff/*here.val*/;
- }
- else if (op & 16) { /* length base */
- len = here & 0xffff/*here.val*/;
- op &= 15; /* number of extra bits */
- if (op) {
- if (bits < op) {
- hold += input[_in++] << bits;
- bits += 8;
- }
- len += hold & ((1 << op) - 1);
- hold >>>= op;
- bits -= op;
- }
- //Tracevv((stderr, "inflate: length %u\n", len));
- if (bits < 15) {
- hold += input[_in++] << bits;
- bits += 8;
- hold += input[_in++] << bits;
- bits += 8;
- }
- here = dcode[hold & dmask];
-
- dodist:
- for (;;) { // goto emulation
- op = here >>> 24/*here.bits*/;
- hold >>>= op;
- bits -= op;
- op = (here >>> 16) & 0xff/*here.op*/;
-
- if (op & 16) { /* distance base */
- dist = here & 0xffff/*here.val*/;
- op &= 15; /* number of extra bits */
- if (bits < op) {
- hold += input[_in++] << bits;
- bits += 8;
- if (bits < op) {
- hold += input[_in++] << bits;
- bits += 8;
- }
- }
- dist += hold & ((1 << op) - 1);
-//#ifdef INFLATE_STRICT
- if (dist > dmax) {
- strm.msg = 'invalid distance too far back';
- state.mode = BAD;
- break top;
- }
-//#endif
- hold >>>= op;
- bits -= op;
- //Tracevv((stderr, "inflate: distance %u\n", dist));
- op = _out - beg; /* max distance in output */
- if (dist > op) { /* see if copy from window */
- op = dist - op; /* distance back in window */
- if (op > whave) {
- if (state.sane) {
- strm.msg = 'invalid distance too far back';
- state.mode = BAD;
- break top;
- }
-
-// (!) This block is disabled in zlib defaults,
-// don't enable it for binary compatibility
-//#ifdef INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR
-// if (len <= op - whave) {
-// do {
-// output[_out++] = 0;
-// } while (--len);
-// continue top;
-// }
-// len -= op - whave;
-// do {
-// output[_out++] = 0;
-// } while (--op > whave);
-// if (op === 0) {
-// from = _out - dist;
-// do {
-// output[_out++] = output[from++];
-// } while (--len);
-// continue top;
-// }
-//#endif
- }
- from = 0; // window index
- from_source = s_window;
- if (wnext === 0) { /* very common case */
- from += wsize - op;
- if (op < len) { /* some from window */
- len -= op;
- do {
- output[_out++] = s_window[from++];
- } while (--op);
- from = _out - dist; /* rest from output */
- from_source = output;
- }
- }
- else if (wnext < op) { /* wrap around window */
- from += wsize + wnext - op;
- op -= wnext;
- if (op < len) { /* some from end of window */
- len -= op;
- do {
- output[_out++] = s_window[from++];
- } while (--op);
- from = 0;
- if (wnext < len) { /* some from start of window */
- op = wnext;
- len -= op;
- do {
- output[_out++] = s_window[from++];
- } while (--op);
- from = _out - dist; /* rest from output */
- from_source = output;
- }
- }
- }
- else { /* contiguous in window */
- from += wnext - op;
- if (op < len) { /* some from window */
- len -= op;
- do {
- output[_out++] = s_window[from++];
- } while (--op);
- from = _out - dist; /* rest from output */
- from_source = output;
- }
- }
- while (len > 2) {
- output[_out++] = from_source[from++];
- output[_out++] = from_source[from++];
- output[_out++] = from_source[from++];
- len -= 3;
- }
- if (len) {
- output[_out++] = from_source[from++];
- if (len > 1) {
- output[_out++] = from_source[from++];
- }
- }
- }
- else {
- from = _out - dist; /* copy direct from output */
- do { /* minimum length is three */
- output[_out++] = output[from++];
- output[_out++] = output[from++];
- output[_out++] = output[from++];
- len -= 3;
- } while (len > 2);
- if (len) {
- output[_out++] = output[from++];
- if (len > 1) {
- output[_out++] = output[from++];
- }
- }
- }
- }
- else if ((op & 64) === 0) { /* 2nd level distance code */
- here = dcode[(here & 0xffff)/*here.val*/ + (hold & ((1 << op) - 1))];
- continue dodist;
- }
- else {
- strm.msg = 'invalid distance code';
- state.mode = BAD;
- break top;
- }
-
- break; // need to emulate goto via "continue"
- }
- }
- else if ((op & 64) === 0) { /* 2nd level length code */
- here = lcode[(here & 0xffff)/*here.val*/ + (hold & ((1 << op) - 1))];
- continue dolen;
- }
- else if (op & 32) { /* end-of-block */
- //Tracevv((stderr, "inflate: end of block\n"));
- state.mode = TYPE;
- break top;
- }
- else {
- strm.msg = 'invalid literal/length code';
- state.mode = BAD;
- break top;
- }
-
- break; // need to emulate goto via "continue"
- }
- } while (_in < last && _out < end);
-
- /* return unused bytes (on entry, bits < 8, so in won't go too far back) */
- len = bits >> 3;
- _in -= len;
- bits -= len << 3;
- hold &= (1 << bits) - 1;
-
- /* update state and return */
- strm.next_in = _in;
- strm.next_out = _out;
- strm.avail_in = (_in < last ? 5 + (last - _in) : 5 - (_in - last));
- strm.avail_out = (_out < end ? 257 + (end - _out) : 257 - (_out - end));
- state.hold = hold;
- state.bits = bits;
- return;
-};
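
The hot loop in the removed inflate_fast copies LZ77 matches one byte at a time even though the full run length is known up front. That is deliberate: when the match distance is shorter than the match length, the source overlaps the destination, and bytes produced earlier in the same copy must be re-read, which a block copy (TypedArray.set or copyWithin) would not do. A stand-alone TypeScript sketch of the behaviour — illustration only, using a plain array instead of the output buffer of the removed code:

```typescript
// Sketch: overlapping back-reference copy, as done byte-by-byte in inflate_fast.
function copyMatch(out: number[], dist: number, len: number): void {
  let from = out.length - dist;
  while (len-- > 0) {
    out.push(out[from++]); // may re-read bytes written earlier in this same copy
  }
}

const buf = [0x61, 0x62];                  // "ab"
copyMatch(buf, 2, 6);                      // distance 2, length 6
console.log(String.fromCharCode(...buf));  // "abababab"
```

Distance 2 with length 6 expands the two-byte seed into "abababab" — the run-length-style expansion that only a byte-by-byte copy preserves.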
diff --git a/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/inflate.js b/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/inflate.js
deleted file mode 100644
index 86d5df4..0000000
--- a/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/inflate.js
+++ /dev/null
@@ -1,1571 +0,0 @@
-'use strict';
-
-// (C) 1995-2013 Jean-loup Gailly and Mark Adler
-// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin
-//
-// This software is provided 'as-is', without any express or implied
-// warranty. In no event will the authors be held liable for any damages
-// arising from the use of this software.
-//
-// Permission is granted to anyone to use this software for any purpose,
-// including commercial applications, and to alter it and redistribute it
-// freely, subject to the following restrictions:
-//
-// 1. The origin of this software must not be misrepresented; you must not
-// claim that you wrote the original software. If you use this software
-// in a product, an acknowledgment in the product documentation would be
-// appreciated but is not required.
-// 2. Altered source versions must be plainly marked as such, and must not be
-// misrepresented as being the original software.
-// 3. This notice may not be removed or altered from any source distribution.
-
-import adler32 from './adler32';
-import crc32 from './crc32';
-import {inflate_fast} from './inffast';
-import {inflate_table} from './inftrees';
-
-const CODES = 0;
-const LENS = 1;
-const DISTS = 2;
-
-/* Public constants ==========================================================*/
-/* ===========================================================================*/
-import constants from './constants'
-var Z_FINISH = constants.Z_FINISH
-var Z_BLOCK = constants.Z_BLOCK
-var Z_TREES = constants.Z_TREES
-var Z_OK = constants.Z_OK
-var Z_STREAM_END = constants.Z_STREAM_END
-var Z_NEED_DICT = constants.Z_NEED_DICT
-var Z_STREAM_ERROR = constants.Z_STREAM_ERROR
-var Z_DATA_ERROR = constants.Z_DATA_ERROR
-var Z_MEM_ERROR = constants.Z_MEM_ERROR
-var Z_BUF_ERROR = constants.Z_BUF_ERROR
-var Z_DEFLATED = constants.Z_DEFLATED
-
-
-
-/* STATES ====================================================================*/
-/* ===========================================================================*/
-
-
-const HEAD = 16180; /* i: waiting for magic header */
-const FLAGS = 16181; /* i: waiting for method and flags (gzip) */
-const TIME = 16182; /* i: waiting for modification time (gzip) */
-const OS = 16183; /* i: waiting for extra flags and operating system (gzip) */
-const EXLEN = 16184; /* i: waiting for extra length (gzip) */
-const EXTRA = 16185; /* i: waiting for extra bytes (gzip) */
-const NAME = 16186; /* i: waiting for end of file name (gzip) */
-const COMMENT = 16187; /* i: waiting for end of comment (gzip) */
-const HCRC = 16188; /* i: waiting for header crc (gzip) */
-const DICTID = 16189; /* i: waiting for dictionary check value */
-const DICT = 16190; /* waiting for inflateSetDictionary() call */
-const TYPE = 16191; /* i: waiting for type bits, including last-flag bit */
-const TYPEDO = 16192; /* i: same, but skip check to exit inflate on new block */
-const STORED = 16193; /* i: waiting for stored size (length and complement) */
-const COPY_ = 16194; /* i/o: same as COPY below, but only first time in */
-const COPY = 16195; /* i/o: waiting for input or output to copy stored block */
-const TABLE = 16196; /* i: waiting for dynamic block table lengths */
-const LENLENS = 16197; /* i: waiting for code length code lengths */
-const CODELENS = 16198; /* i: waiting for length/lit and distance code lengths */
-const LEN_ = 16199; /* i: same as LEN below, but only first time in */
-const LEN = 16200; /* i: waiting for length/lit/eob code */
-const LENEXT = 16201; /* i: waiting for length extra bits */
-const DIST = 16202; /* i: waiting for distance code */
-const DISTEXT = 16203; /* i: waiting for distance extra bits */
-const MATCH = 16204; /* o: waiting for output space to copy string */
-const LIT = 16205; /* o: waiting for output space to write literal */
-const CHECK = 16206; /* i: waiting for 32-bit check value */
-const LENGTH = 16207; /* i: waiting for 32-bit length (gzip) */
-const DONE = 16208; /* finished check, done -- remain here until reset */
-const BAD = 16209; /* got a data error -- remain here until reset */
-const MEM = 16210; /* got an inflate() memory error -- remain here until reset */
-const SYNC = 16211; /* looking for synchronization bytes to restart inflate() */
-
-/* ===========================================================================*/
-
-
-
-const ENOUGH_LENS = 852;
-const ENOUGH_DISTS = 592;
-//const ENOUGH = (ENOUGH_LENS+ENOUGH_DISTS);
-
-const MAX_WBITS = 15;
-/* 32K LZ77 window */
-const DEF_WBITS = MAX_WBITS;
-
-
-const zswap32 = (q) => {
-
- return (((q >>> 24) & 0xff) +
- ((q >>> 8) & 0xff00) +
- ((q & 0xff00) << 8) +
- ((q & 0xff) << 24));
-};
-
-
-function InflateState() {
- this.strm = null; /* pointer back to this zlib stream */
- this.mode = 0; /* current inflate mode */
- this.last = false; /* true if processing last block */
- this.wrap = 0; /* bit 0 true for zlib, bit 1 true for gzip,
- bit 2 true to validate check value */
- this.havedict = false; /* true if dictionary provided */
- this.flags = 0; /* gzip header method and flags (0 if zlib), or
- -1 if raw or no header yet */
- this.dmax = 0; /* zlib header max distance (INFLATE_STRICT) */
- this.check = 0; /* protected copy of check value */
- this.total = 0; /* protected copy of output count */
- // TODO: may be {}
- this.head = null; /* where to save gzip header information */
-
- /* sliding window */
- this.wbits = 0; /* log base 2 of requested window size */
- this.wsize = 0; /* window size or zero if not using window */
- this.whave = 0; /* valid bytes in the window */
- this.wnext = 0; /* window write index */
- this.window = null; /* allocated sliding window, if needed */
-
- /* bit accumulator */
- this.hold = 0; /* input bit accumulator */
- this.bits = 0; /* number of bits in "in" */
-
- /* for string and stored block copying */
- this.length = 0; /* literal or length of data to copy */
- this.offset = 0; /* distance back to copy string from */
-
- /* for table and code decoding */
- this.extra = 0; /* extra bits needed */
-
- /* fixed and dynamic code tables */
- this.lencode = null; /* starting table for length/literal codes */
- this.distcode = null; /* starting table for distance codes */
- this.lenbits = 0; /* index bits for lencode */
- this.distbits = 0; /* index bits for distcode */
-
- /* dynamic table building */
- this.ncode = 0; /* number of code length code lengths */
- this.nlen = 0; /* number of length code lengths */
- this.ndist = 0; /* number of distance code lengths */
- this.have = 0; /* number of code lengths in lens[] */
- this.next = null; /* next available space in codes[] */
-
- this.lens = new Uint16Array(320); /* temporary storage for code lengths */
- this.work = new Uint16Array(288); /* work area for code table building */
-
- /*
- because we don't have pointers in js, we use lencode and distcode directly
- as buffers so we don't need codes
- */
- //this.codes = new Int32Array(ENOUGH); /* space for code tables */
- this.lendyn = null; /* dynamic table for length/literal codes (JS specific) */
- this.distdyn = null; /* dynamic table for distance codes (JS specific) */
- this.sane = 0; /* if false, allow invalid distance too far */
- this.back = 0; /* bits back of last unprocessed length/lit */
- this.was = 0; /* initial length of match */
-}
-
-
-const inflateStateCheck = (strm) => {
-
- if (!strm) {
- return 1;
- }
- const state = strm.state;
- if (!state || state.strm !== strm ||
- state.mode < HEAD || state.mode > SYNC) {
- return 1;
- }
- return 0;
-};
-
-
-const inflateResetKeep = (strm) => {
-
- if (inflateStateCheck(strm)) { return Z_STREAM_ERROR; }
- const state = strm.state;
- strm.total_in = strm.total_out = state.total = 0;
- strm.msg = ''; /*Z_NULL*/
- if (state.wrap) { /* to support ill-conceived Java test suite */
- strm.adler = state.wrap & 1;
- }
- state.mode = HEAD;
- state.last = 0;
- state.havedict = 0;
- state.flags = -1;
- state.dmax = 32768;
- state.head = null/*Z_NULL*/;
- state.hold = 0;
- state.bits = 0;
- //state.lencode = state.distcode = state.next = state.codes;
- state.lencode = state.lendyn = new Int32Array(ENOUGH_LENS);
- state.distcode = state.distdyn = new Int32Array(ENOUGH_DISTS);
-
- state.sane = 1;
- state.back = -1;
- //Tracev((stderr, "inflate: reset\n"));
- return Z_OK;
-};
-
-
-const inflateReset = (strm) => {
-
- if (inflateStateCheck(strm)) { return Z_STREAM_ERROR; }
- const state = strm.state;
- state.wsize = 0;
- state.whave = 0;
- state.wnext = 0;
- return inflateResetKeep(strm);
-
-};
-
-
-const inflateReset2 = (strm, windowBits) => {
- let wrap;
-
- /* get the state */
- if (inflateStateCheck(strm)) { return Z_STREAM_ERROR; }
- const state = strm.state;
-
- /* extract wrap request from windowBits parameter */
- if (windowBits < 0) {
- wrap = 0;
- windowBits = -windowBits;
- }
- else {
- wrap = (windowBits >> 4) + 5;
- if (windowBits < 48) {
- windowBits &= 15;
- }
- }
-
- /* set number of window bits, free window if different */
- if (windowBits && (windowBits < 8 || windowBits > 15)) {
- return Z_STREAM_ERROR;
- }
- if (state.window !== null && state.wbits !== windowBits) {
- state.window = null;
- }
-
- /* update state and reset the rest of it */
- state.wrap = wrap;
- state.wbits = windowBits;
- return inflateReset(strm);
-};
-
-
-const inflateInit2 = (strm, windowBits) => {
-
- if (!strm) { return Z_STREAM_ERROR; }
- //strm.msg = Z_NULL; /* in case we return an error */
-
- const state = new InflateState();
-
- //if (state === Z_NULL) return Z_MEM_ERROR;
- //Tracev((stderr, "inflate: allocated\n"));
- strm.state = state;
- state.strm = strm;
- state.window = null/*Z_NULL*/;
- state.mode = HEAD; /* to pass state test in inflateReset2() */
- const ret = inflateReset2(strm, windowBits);
- if (ret !== Z_OK) {
- strm.state = null/*Z_NULL*/;
- }
- return ret;
-};
-
-
-const inflateInit = (strm) => {
-
- return inflateInit2(strm, DEF_WBITS);
-};
-
-
-/*
- Return state with length and distance decoding tables and index sizes set to
- fixed code decoding. Normally this returns fixed tables from inffixed.h.
- If BUILDFIXED is defined, then instead this routine builds the tables the
- first time it's called, and returns those tables the first time and
- thereafter. This reduces the size of the code by about 2K bytes, in
- exchange for a little execution time. However, BUILDFIXED should not be
- used for threaded applications, since the rewriting of the tables and virgin
- may not be thread-safe.
- */
-let virgin = true;
-
-let lenfix, distfix; // We have no pointers in JS, so keep tables separate
-
-
-const fixedtables = (state) => {
-
- /* build fixed huffman tables if first call (may not be thread safe) */
- if (virgin) {
- lenfix = new Int32Array(512);
- distfix = new Int32Array(32);
-
- /* literal/length table */
- let sym = 0;
- while (sym < 144) { state.lens[sym++] = 8; }
- while (sym < 256) { state.lens[sym++] = 9; }
- while (sym < 280) { state.lens[sym++] = 7; }
- while (sym < 288) { state.lens[sym++] = 8; }
-
- inflate_table(LENS, state.lens, 0, 288, lenfix, 0, state.work, { bits: 9 });
-
- /* distance table */
- sym = 0;
- while (sym < 32) { state.lens[sym++] = 5; }
-
- inflate_table(DISTS, state.lens, 0, 32, distfix, 0, state.work, { bits: 5 });
-
- /* do this just once */
- virgin = false;
- }
-
- state.lencode = lenfix;
- state.lenbits = 9;
- state.distcode = distfix;
- state.distbits = 5;
-};
-
-
-/*
- Update the window with the last wsize (normally 32K) bytes written before
- returning. If window does not exist yet, create it. This is only called
- when a window is already in use, or when output has been written during this
- inflate call, but the end of the deflate stream has not been reached yet.
- It is also called to create a window for dictionary data when a dictionary
- is loaded.
-
- Providing output buffers larger than 32K to inflate() should provide a speed
- advantage, since only the last 32K of output is copied to the sliding window
- upon return from inflate(), and since all distances after the first 32K of
- output will fall in the output data, making match copies simpler and faster.
- The advantage may be dependent on the size of the processor's data caches.
- */
-const updatewindow = (strm, src, end, copy) => {
-
- let dist;
- const state = strm.state;
-
- /* if it hasn't been done already, allocate space for the window */
- if (state.window === null) {
- state.wsize = 1 << state.wbits;
- state.wnext = 0;
- state.whave = 0;
-
- state.window = new Uint8Array(state.wsize);
- }
-
- /* copy state->wsize or less output bytes into the circular window */
- if (copy >= state.wsize) {
- state.window.set(src.subarray(end - state.wsize, end), 0);
- state.wnext = 0;
- state.whave = state.wsize;
- }
- else {
- dist = state.wsize - state.wnext;
- if (dist > copy) {
- dist = copy;
- }
- //zmemcpy(state->window + state->wnext, end - copy, dist);
- state.window.set(src.subarray(end - copy, end - copy + dist), state.wnext);
- copy -= dist;
- if (copy) {
- //zmemcpy(state->window, end - copy, copy);
- state.window.set(src.subarray(end - copy, end), 0);
- state.wnext = copy;
- state.whave = state.wsize;
- }
- else {
- state.wnext += dist;
- if (state.wnext === state.wsize) { state.wnext = 0; }
- if (state.whave < state.wsize) { state.whave += dist; }
- }
- }
- return 0;
-};
-
-
-const inflate = (strm, flush) => {
-
- let state;
- let input, output; // input/output buffers
- let next; /* next input INDEX */
- let put; /* next output INDEX */
- let have, left; /* available input and output */
- let hold; /* bit buffer */
- let bits; /* bits in bit buffer */
- let _in, _out; /* save starting available input and output */
- let copy; /* number of stored or match bytes to copy */
- let from; /* where to copy match bytes from */
- let from_source;
- let here = 0; /* current decoding table entry */
- let here_bits, here_op, here_val; // paked "here" denormalized (JS specific)
- //let last; /* parent table entry */
- let last_bits, last_op, last_val; // paked "last" denormalized (JS specific)
- let len; /* length to copy for repeats, bits to drop */
- let ret; /* return code */
- const hbuf = new Uint8Array(4); /* buffer for gzip header crc calculation */
- let opts;
-
- let n; // temporary variable for NEED_BITS
-
- const order = /* permutation of code lengths */
- new Uint8Array([ 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 ]);
-
-
- if (inflateStateCheck(strm) || !strm.output ||
- (!strm.input && strm.avail_in !== 0)) {
- return Z_STREAM_ERROR;
- }
-
- state = strm.state;
- if (state.mode === TYPE) { state.mode = TYPEDO; } /* skip check */
-
-
- //--- LOAD() ---
- put = strm.next_out;
- output = strm.output;
- left = strm.avail_out;
- next = strm.next_in;
- input = strm.input;
- have = strm.avail_in;
- hold = state.hold;
- bits = state.bits;
- //---
-
- _in = have;
- _out = left;
- ret = Z_OK;
-
- inf_leave: // goto emulation
- for (;;) {
- switch (state.mode) {
- case HEAD:
- if (state.wrap === 0) {
- state.mode = TYPEDO;
- break;
- }
- //=== NEEDBITS(16);
- while (bits < 16) {
- if (have === 0) { break inf_leave; }
- have--;
- hold += input[next++] << bits;
- bits += 8;
- }
- //===//
- if ((state.wrap & 2) && hold === 0x8b1f) { /* gzip header */
- if (state.wbits === 0) {
- state.wbits = 15;
- }
- state.check = 0/*crc32(0L, Z_NULL, 0)*/;
- //=== CRC2(state.check, hold);
- hbuf[0] = hold & 0xff;
- hbuf[1] = (hold >>> 8) & 0xff;
- state.check = crc32(state.check, hbuf, 2, 0);
- //===//
-
- //=== INITBITS();
- hold = 0;
- bits = 0;
- //===//
- state.mode = FLAGS;
- break;
- }
- if (state.head) {
- state.head.done = false;
- }
- if (!(state.wrap & 1) || /* check if zlib header allowed */
- (((hold & 0xff)/*BITS(8)*/ << 8) + (hold >> 8)) % 31) {
- strm.msg = 'incorrect header check';
- state.mode = BAD;
- break;
- }
- if ((hold & 0x0f)/*BITS(4)*/ !== Z_DEFLATED) {
- strm.msg = 'unknown compression method';
- state.mode = BAD;
- break;
- }
- //--- DROPBITS(4) ---//
- hold >>>= 4;
- bits -= 4;
- //---//
- len = (hold & 0x0f)/*BITS(4)*/ + 8;
- if (state.wbits === 0) {
- state.wbits = len;
- }
- if (len > 15 || len > state.wbits) {
- strm.msg = 'invalid window size';
- state.mode = BAD;
- break;
- }
-
- // !!! pako patch. Force use `options.windowBits` if passed.
- // Required to always use max window size by default.
- state.dmax = 1 << state.wbits;
- //state.dmax = 1 << len;
-
- state.flags = 0; /* indicate zlib header */
- //Tracev((stderr, "inflate: zlib header ok\n"));
- strm.adler = state.check = 1/*adler32(0L, Z_NULL, 0)*/;
- state.mode = hold & 0x200 ? DICTID : TYPE;
- //=== INITBITS();
- hold = 0;
- bits = 0;
- //===//
- break;
- case FLAGS:
- //=== NEEDBITS(16); */
- while (bits < 16) {
- if (have === 0) { break inf_leave; }
- have--;
- hold += input[next++] << bits;
- bits += 8;
- }
- //===//
- state.flags = hold;
- if ((state.flags & 0xff) !== Z_DEFLATED) {
- strm.msg = 'unknown compression method';
- state.mode = BAD;
- break;
- }
- if (state.flags & 0xe000) {
- strm.msg = 'unknown header flags set';
- state.mode = BAD;
- break;
- }
- if (state.head) {
- state.head.text = ((hold >> 8) & 1);
- }
- if ((state.flags & 0x0200) && (state.wrap & 4)) {
- //=== CRC2(state.check, hold);
- hbuf[0] = hold & 0xff;
- hbuf[1] = (hold >>> 8) & 0xff;
- state.check = crc32(state.check, hbuf, 2, 0);
- //===//
- }
- //=== INITBITS();
- hold = 0;
- bits = 0;
- //===//
- state.mode = TIME;
- /* falls through */
- case TIME:
- //=== NEEDBITS(32); */
- while (bits < 32) {
- if (have === 0) { break inf_leave; }
- have--;
- hold += input[next++] << bits;
- bits += 8;
- }
- //===//
- if (state.head) {
- state.head.time = hold;
- }
- if ((state.flags & 0x0200) && (state.wrap & 4)) {
- //=== CRC4(state.check, hold)
- hbuf[0] = hold & 0xff;
- hbuf[1] = (hold >>> 8) & 0xff;
- hbuf[2] = (hold >>> 16) & 0xff;
- hbuf[3] = (hold >>> 24) & 0xff;
- state.check = crc32(state.check, hbuf, 4, 0);
- //===
- }
- //=== INITBITS();
- hold = 0;
- bits = 0;
- //===//
- state.mode = OS;
- /* falls through */
- case OS:
- //=== NEEDBITS(16); */
- while (bits < 16) {
- if (have === 0) { break inf_leave; }
- have--;
- hold += input[next++] << bits;
- bits += 8;
- }
- //===//
- if (state.head) {
- state.head.xflags = (hold & 0xff);
- state.head.os = (hold >> 8);
- }
- if ((state.flags & 0x0200) && (state.wrap & 4)) {
- //=== CRC2(state.check, hold);
- hbuf[0] = hold & 0xff;
- hbuf[1] = (hold >>> 8) & 0xff;
- state.check = crc32(state.check, hbuf, 2, 0);
- //===//
- }
- //=== INITBITS();
- hold = 0;
- bits = 0;
- //===//
- state.mode = EXLEN;
- /* falls through */
- case EXLEN:
- if (state.flags & 0x0400) {
- //=== NEEDBITS(16); */
- while (bits < 16) {
- if (have === 0) { break inf_leave; }
- have--;
- hold += input[next++] << bits;
- bits += 8;
- }
- //===//
- state.length = hold;
- if (state.head) {
- state.head.extra_len = hold;
- }
- if ((state.flags & 0x0200) && (state.wrap & 4)) {
- //=== CRC2(state.check, hold);
- hbuf[0] = hold & 0xff;
- hbuf[1] = (hold >>> 8) & 0xff;
- state.check = crc32(state.check, hbuf, 2, 0);
- //===//
- }
- //=== INITBITS();
- hold = 0;
- bits = 0;
- //===//
- }
- else if (state.head) {
- state.head.extra = null/*Z_NULL*/;
- }
- state.mode = EXTRA;
- /* falls through */
- case EXTRA:
- if (state.flags & 0x0400) {
- copy = state.length;
- if (copy > have) { copy = have; }
- if (copy) {
- if (state.head) {
- len = state.head.extra_len - state.length;
- if (!state.head.extra) {
- // Use untyped array for more convenient processing later
- state.head.extra = new Uint8Array(state.head.extra_len);
- }
- state.head.extra.set(
- input.subarray(
- next,
- // extra field is limited to 65536 bytes
- // - no need for additional size check
- next + copy
- ),
- /*len + copy > state.head.extra_max - len ? state.head.extra_max : copy,*/
- len
- );
- //zmemcpy(state.head.extra + len, next,
- // len + copy > state.head.extra_max ?
- // state.head.extra_max - len : copy);
- }
- if ((state.flags & 0x0200) && (state.wrap & 4)) {
- state.check = crc32(state.check, input, copy, next);
- }
- have -= copy;
- next += copy;
- state.length -= copy;
- }
- if (state.length) { break inf_leave; }
- }
- state.length = 0;
- state.mode = NAME;
- /* falls through */
- case NAME:
- if (state.flags & 0x0800) {
- if (have === 0) { break inf_leave; }
- copy = 0;
- do {
- // TODO: 2 or 1 bytes?
- len = input[next + copy++];
- /* use constant limit because in js we should not preallocate memory */
- if (state.head && len &&
- (state.length < 65536 /*state.head.name_max*/)) {
- state.head.name += String.fromCharCode(len);
- }
- } while (len && copy < have);
-
- if ((state.flags & 0x0200) && (state.wrap & 4)) {
- state.check = crc32(state.check, input, copy, next);
- }
- have -= copy;
- next += copy;
- if (len) { break inf_leave; }
- }
- else if (state.head) {
- state.head.name = null;
- }
- state.length = 0;
- state.mode = COMMENT;
- /* falls through */
- case COMMENT:
- if (state.flags & 0x1000) {
- if (have === 0) { break inf_leave; }
- copy = 0;
- do {
- len = input[next + copy++];
- /* use constant limit because in js we should not preallocate memory */
- if (state.head && len &&
- (state.length < 65536 /*state.head.comm_max*/)) {
- state.head.comment += String.fromCharCode(len);
- }
- } while (len && copy < have);
- if ((state.flags & 0x0200) && (state.wrap & 4)) {
- state.check = crc32(state.check, input, copy, next);
- }
- have -= copy;
- next += copy;
- if (len) { break inf_leave; }
- }
- else if (state.head) {
- state.head.comment = null;
- }
- state.mode = HCRC;
- /* falls through */
- case HCRC:
- if (state.flags & 0x0200) {
- //=== NEEDBITS(16); */
- while (bits < 16) {
- if (have === 0) { break inf_leave; }
- have--;
- hold += input[next++] << bits;
- bits += 8;
- }
- //===//
- if ((state.wrap & 4) && hold !== (state.check & 0xffff)) {
- strm.msg = 'header crc mismatch';
- state.mode = BAD;
- break;
- }
- //=== INITBITS();
- hold = 0;
- bits = 0;
- //===//
- }
- if (state.head) {
- state.head.hcrc = ((state.flags >> 9) & 1);
- state.head.done = true;
- }
- strm.adler = state.check = 0;
- state.mode = TYPE;
- break;
- case DICTID:
- //=== NEEDBITS(32); */
- while (bits < 32) {
- if (have === 0) { break inf_leave; }
- have--;
- hold += input[next++] << bits;
- bits += 8;
- }
- //===//
- strm.adler = state.check = zswap32(hold);
- //=== INITBITS();
- hold = 0;
- bits = 0;
- //===//
- state.mode = DICT;
- /* falls through */
- case DICT:
- if (state.havedict === 0) {
- //--- RESTORE() ---
- strm.next_out = put;
- strm.avail_out = left;
- strm.next_in = next;
- strm.avail_in = have;
- state.hold = hold;
- state.bits = bits;
- //---
- return Z_NEED_DICT;
- }
- strm.adler = state.check = 1/*adler32(0L, Z_NULL, 0)*/;
- state.mode = TYPE;
- /* falls through */
- case TYPE:
- if (flush === Z_BLOCK || flush === Z_TREES) { break inf_leave; }
- /* falls through */
- case TYPEDO:
- if (state.last) {
- //--- BYTEBITS() ---//
- hold >>>= bits & 7;
- bits -= bits & 7;
- //---//
- state.mode = CHECK;
- break;
- }
- //=== NEEDBITS(3); */
- while (bits < 3) {
- if (have === 0) { break inf_leave; }
- have--;
- hold += input[next++] << bits;
- bits += 8;
- }
- //===//
- state.last = (hold & 0x01)/*BITS(1)*/;
- //--- DROPBITS(1) ---//
- hold >>>= 1;
- bits -= 1;
- //---//
-
- switch ((hold & 0x03)/*BITS(2)*/) {
- case 0: /* stored block */
- //Tracev((stderr, "inflate: stored block%s\n",
- // state.last ? " (last)" : ""));
- state.mode = STORED;
- break;
- case 1: /* fixed block */
- fixedtables(state);
- //Tracev((stderr, "inflate: fixed codes block%s\n",
- // state.last ? " (last)" : ""));
- state.mode = LEN_; /* decode codes */
- if (flush === Z_TREES) {
- //--- DROPBITS(2) ---//
- hold >>>= 2;
- bits -= 2;
- //---//
- break inf_leave;
- }
- break;
- case 2: /* dynamic block */
- //Tracev((stderr, "inflate: dynamic codes block%s\n",
- // state.last ? " (last)" : ""));
- state.mode = TABLE;
- break;
- case 3:
- strm.msg = 'invalid block type';
- state.mode = BAD;
- }
- //--- DROPBITS(2) ---//
- hold >>>= 2;
- bits -= 2;
- //---//
- break;
- case STORED:
- //--- BYTEBITS() ---// /* go to byte boundary */
- hold >>>= bits & 7;
- bits -= bits & 7;
- //---//
- //=== NEEDBITS(32); */
- while (bits < 32) {
- if (have === 0) { break inf_leave; }
- have--;
- hold += input[next++] << bits;
- bits += 8;
- }
- //===//
- if ((hold & 0xffff) !== ((hold >>> 16) ^ 0xffff)) {
- strm.msg = 'invalid stored block lengths';
- state.mode = BAD;
- break;
- }
- state.length = hold & 0xffff;
- //Tracev((stderr, "inflate: stored length %u\n",
- // state.length));
- //=== INITBITS();
- hold = 0;
- bits = 0;
- //===//
- state.mode = COPY_;
- if (flush === Z_TREES) { break inf_leave; }
- /* falls through */
- case COPY_:
- state.mode = COPY;
- /* falls through */
- case COPY:
- copy = state.length;
- if (copy) {
- if (copy > have) { copy = have; }
- if (copy > left) { copy = left; }
- if (copy === 0) { break inf_leave; }
- //--- zmemcpy(put, next, copy); ---
- output.set(input.subarray(next, next + copy), put);
- //---//
- have -= copy;
- next += copy;
- left -= copy;
- put += copy;
- state.length -= copy;
- break;
- }
- //Tracev((stderr, "inflate: stored end\n"));
- state.mode = TYPE;
- break;
- case TABLE:
- //=== NEEDBITS(14); */
- while (bits < 14) {
- if (have === 0) { break inf_leave; }
- have--;
- hold += input[next++] << bits;
- bits += 8;
- }
- //===//
- state.nlen = (hold & 0x1f)/*BITS(5)*/ + 257;
- //--- DROPBITS(5) ---//
- hold >>>= 5;
- bits -= 5;
- //---//
- state.ndist = (hold & 0x1f)/*BITS(5)*/ + 1;
- //--- DROPBITS(5) ---//
- hold >>>= 5;
- bits -= 5;
- //---//
- state.ncode = (hold & 0x0f)/*BITS(4)*/ + 4;
- //--- DROPBITS(4) ---//
- hold >>>= 4;
- bits -= 4;
- //---//
-//#ifndef PKZIP_BUG_WORKAROUND
- if (state.nlen > 286 || state.ndist > 30) {
- strm.msg = 'too many length or distance symbols';
- state.mode = BAD;
- break;
- }
-//#endif
- //Tracev((stderr, "inflate: table sizes ok\n"));
- state.have = 0;
- state.mode = LENLENS;
- /* falls through */
- case LENLENS:
- while (state.have < state.ncode) {
- //=== NEEDBITS(3);
- while (bits < 3) {
- if (have === 0) { break inf_leave; }
- have--;
- hold += input[next++] << bits;
- bits += 8;
- }
- //===//
- state.lens[order[state.have++]] = (hold & 0x07);//BITS(3);
- //--- DROPBITS(3) ---//
- hold >>>= 3;
- bits -= 3;
- //---//
- }
- while (state.have < 19) {
- state.lens[order[state.have++]] = 0;
- }
- // We have separate tables & no pointers. 2 commented lines below not needed.
- //state.next = state.codes;
- //state.lencode = state.next;
- // Switch to use dynamic table
- state.lencode = state.lendyn;
- state.lenbits = 7;
-
- opts = { bits: state.lenbits };
- ret = inflate_table(CODES, state.lens, 0, 19, state.lencode, 0, state.work, opts);
- state.lenbits = opts.bits;
-
- if (ret) {
- strm.msg = 'invalid code lengths set';
- state.mode = BAD;
- break;
- }
- //Tracev((stderr, "inflate: code lengths ok\n"));
- state.have = 0;
- state.mode = CODELENS;
- /* falls through */
- case CODELENS:
- while (state.have < state.nlen + state.ndist) {
- for (;;) {
- here = state.lencode[hold & ((1 << state.lenbits) - 1)];/*BITS(state.lenbits)*/
- here_bits = here >>> 24;
- here_op = (here >>> 16) & 0xff;
- here_val = here & 0xffff;
-
- if ((here_bits) <= bits) { break; }
- //--- PULLBYTE() ---//
- if (have === 0) { break inf_leave; }
- have--;
- hold += input[next++] << bits;
- bits += 8;
- //---//
- }
- if (here_val < 16) {
- //--- DROPBITS(here.bits) ---//
- hold >>>= here_bits;
- bits -= here_bits;
- //---//
- state.lens[state.have++] = here_val;
- }
- else {
- if (here_val === 16) {
- //=== NEEDBITS(here.bits + 2);
- n = here_bits + 2;
- while (bits < n) {
- if (have === 0) { break inf_leave; }
- have--;
- hold += input[next++] << bits;
- bits += 8;
- }
- //===//
- //--- DROPBITS(here.bits) ---//
- hold >>>= here_bits;
- bits -= here_bits;
- //---//
- if (state.have === 0) {
- strm.msg = 'invalid bit length repeat';
- state.mode = BAD;
- break;
- }
- len = state.lens[state.have - 1];
- copy = 3 + (hold & 0x03);//BITS(2);
- //--- DROPBITS(2) ---//
- hold >>>= 2;
- bits -= 2;
- //---//
- }
- else if (here_val === 17) {
- //=== NEEDBITS(here.bits + 3);
- n = here_bits + 3;
- while (bits < n) {
- if (have === 0) { break inf_leave; }
- have--;
- hold += input[next++] << bits;
- bits += 8;
- }
- //===//
- //--- DROPBITS(here.bits) ---//
- hold >>>= here_bits;
- bits -= here_bits;
- //---//
- len = 0;
- copy = 3 + (hold & 0x07);//BITS(3);
- //--- DROPBITS(3) ---//
- hold >>>= 3;
- bits -= 3;
- //---//
- }
- else {
- //=== NEEDBITS(here.bits + 7);
- n = here_bits + 7;
- while (bits < n) {
- if (have === 0) { break inf_leave; }
- have--;
- hold += input[next++] << bits;
- bits += 8;
- }
- //===//
- //--- DROPBITS(here.bits) ---//
- hold >>>= here_bits;
- bits -= here_bits;
- //---//
- len = 0;
- copy = 11 + (hold & 0x7f);//BITS(7);
- //--- DROPBITS(7) ---//
- hold >>>= 7;
- bits -= 7;
- //---//
- }
- if (state.have + copy > state.nlen + state.ndist) {
- strm.msg = 'invalid bit length repeat';
- state.mode = BAD;
- break;
- }
- while (copy--) {
- state.lens[state.have++] = len;
- }
- }
- }
-
- /* handle error breaks in while */
- if (state.mode === BAD) { break; }
-
- /* check for end-of-block code (better have one) */
- if (state.lens[256] === 0) {
- strm.msg = 'invalid code -- missing end-of-block';
- state.mode = BAD;
- break;
- }
-
- /* build code tables -- note: do not change the lenbits or distbits
- values here (9 and 6) without reading the comments in inftrees.h
- concerning the ENOUGH constants, which depend on those values */
- state.lenbits = 9;
-
- opts = { bits: state.lenbits };
- ret = inflate_table(LENS, state.lens, 0, state.nlen, state.lencode, 0, state.work, opts);
- // We have separate tables & no pointers. 2 commented lines below not needed.
- // state.next_index = opts.table_index;
- state.lenbits = opts.bits;
- // state.lencode = state.next;
-
- if (ret) {
- strm.msg = 'invalid literal/lengths set';
- state.mode = BAD;
- break;
- }
-
- state.distbits = 6;
- //state.distcode.copy(state.codes);
- // Switch to use dynamic table
- state.distcode = state.distdyn;
- opts = { bits: state.distbits };
- ret = inflate_table(DISTS, state.lens, state.nlen, state.ndist, state.distcode, 0, state.work, opts);
- // We have separate tables & no pointers. 2 commented lines below not needed.
- // state.next_index = opts.table_index;
- state.distbits = opts.bits;
- // state.distcode = state.next;
-
- if (ret) {
- strm.msg = 'invalid distances set';
- state.mode = BAD;
- break;
- }
- //Tracev((stderr, 'inflate: codes ok\n'));
- state.mode = LEN_;
- if (flush === Z_TREES) { break inf_leave; }
- /* falls through */
- case LEN_:
- state.mode = LEN;
- /* falls through */
- case LEN:
- if (have >= 6 && left >= 258) {
- //--- RESTORE() ---
- strm.next_out = put;
- strm.avail_out = left;
- strm.next_in = next;
- strm.avail_in = have;
- state.hold = hold;
- state.bits = bits;
- //---
- inflate_fast(strm, _out);
- //--- LOAD() ---
- put = strm.next_out;
- output = strm.output;
- left = strm.avail_out;
- next = strm.next_in;
- input = strm.input;
- have = strm.avail_in;
- hold = state.hold;
- bits = state.bits;
- //---
-
- if (state.mode === TYPE) {
- state.back = -1;
- }
- break;
- }
- state.back = 0;
- for (;;) {
- here = state.lencode[hold & ((1 << state.lenbits) - 1)]; /*BITS(state.lenbits)*/
- here_bits = here >>> 24;
- here_op = (here >>> 16) & 0xff;
- here_val = here & 0xffff;
-
- if (here_bits <= bits) { break; }
- //--- PULLBYTE() ---//
- if (have === 0) { break inf_leave; }
- have--;
- hold += input[next++] << bits;
- bits += 8;
- //---//
- }
- if (here_op && (here_op & 0xf0) === 0) {
- last_bits = here_bits;
- last_op = here_op;
- last_val = here_val;
- for (;;) {
- here = state.lencode[last_val +
- ((hold & ((1 << (last_bits + last_op)) - 1))/*BITS(last.bits + last.op)*/ >> last_bits)];
- here_bits = here >>> 24;
- here_op = (here >>> 16) & 0xff;
- here_val = here & 0xffff;
-
- if ((last_bits + here_bits) <= bits) { break; }
- //--- PULLBYTE() ---//
- if (have === 0) { break inf_leave; }
- have--;
- hold += input[next++] << bits;
- bits += 8;
- //---//
- }
- //--- DROPBITS(last.bits) ---//
- hold >>>= last_bits;
- bits -= last_bits;
- //---//
- state.back += last_bits;
- }
- //--- DROPBITS(here.bits) ---//
- hold >>>= here_bits;
- bits -= here_bits;
- //---//
- state.back += here_bits;
- state.length = here_val;
- if (here_op === 0) {
- //Tracevv((stderr, here.val >= 0x20 && here.val < 0x7f ?
- // "inflate: literal '%c'\n" :
- // "inflate: literal 0x%02x\n", here.val));
- state.mode = LIT;
- break;
- }
- if (here_op & 32) {
- //Tracevv((stderr, "inflate: end of block\n"));
- state.back = -1;
- state.mode = TYPE;
- break;
- }
- if (here_op & 64) {
- strm.msg = 'invalid literal/length code';
- state.mode = BAD;
- break;
- }
- state.extra = here_op & 15;
- state.mode = LENEXT;
- /* falls through */
- case LENEXT:
- if (state.extra) {
- //=== NEEDBITS(state.extra);
- n = state.extra;
- while (bits < n) {
- if (have === 0) { break inf_leave; }
- have--;
- hold += input[next++] << bits;
- bits += 8;
- }
- //===//
- state.length += hold & ((1 << state.extra) - 1)/*BITS(state.extra)*/;
- //--- DROPBITS(state.extra) ---//
- hold >>>= state.extra;
- bits -= state.extra;
- //---//
- state.back += state.extra;
- }
- //Tracevv((stderr, "inflate: length %u\n", state.length));
- state.was = state.length;
- state.mode = DIST;
- /* falls through */
- case DIST:
- for (;;) {
- here = state.distcode[hold & ((1 << state.distbits) - 1)];/*BITS(state.distbits)*/
- here_bits = here >>> 24;
- here_op = (here >>> 16) & 0xff;
- here_val = here & 0xffff;
-
- if ((here_bits) <= bits) { break; }
- //--- PULLBYTE() ---//
- if (have === 0) { break inf_leave; }
- have--;
- hold += input[next++] << bits;
- bits += 8;
- //---//
- }
- if ((here_op & 0xf0) === 0) {
- last_bits = here_bits;
- last_op = here_op;
- last_val = here_val;
- for (;;) {
- here = state.distcode[last_val +
- ((hold & ((1 << (last_bits + last_op)) - 1))/*BITS(last.bits + last.op)*/ >> last_bits)];
- here_bits = here >>> 24;
- here_op = (here >>> 16) & 0xff;
- here_val = here & 0xffff;
-
- if ((last_bits + here_bits) <= bits) { break; }
- //--- PULLBYTE() ---//
- if (have === 0) { break inf_leave; }
- have--;
- hold += input[next++] << bits;
- bits += 8;
- //---//
- }
- //--- DROPBITS(last.bits) ---//
- hold >>>= last_bits;
- bits -= last_bits;
- //---//
- state.back += last_bits;
- }
- //--- DROPBITS(here.bits) ---//
- hold >>>= here_bits;
- bits -= here_bits;
- //---//
- state.back += here_bits;
- if (here_op & 64) {
- strm.msg = 'invalid distance code';
- state.mode = BAD;
- break;
- }
- state.offset = here_val;
- state.extra = (here_op) & 15;
- state.mode = DISTEXT;
- /* falls through */
- case DISTEXT:
- if (state.extra) {
- //=== NEEDBITS(state.extra);
- n = state.extra;
- while (bits < n) {
- if (have === 0) { break inf_leave; }
- have--;
- hold += input[next++] << bits;
- bits += 8;
- }
- //===//
- state.offset += hold & ((1 << state.extra) - 1)/*BITS(state.extra)*/;
- //--- DROPBITS(state.extra) ---//
- hold >>>= state.extra;
- bits -= state.extra;
- //---//
- state.back += state.extra;
- }
-//#ifdef INFLATE_STRICT
- if (state.offset > state.dmax) {
- strm.msg = 'invalid distance too far back';
- state.mode = BAD;
- break;
- }
-//#endif
- //Tracevv((stderr, "inflate: distance %u\n", state.offset));
- state.mode = MATCH;
- /* falls through */
- case MATCH:
- if (left === 0) { break inf_leave; }
- copy = _out - left;
- if (state.offset > copy) { /* copy from window */
- copy = state.offset - copy;
- if (copy > state.whave) {
- if (state.sane) {
- strm.msg = 'invalid distance too far back';
- state.mode = BAD;
- break;
- }
-// (!) This block is disabled in zlib defaults,
-// don't enable it for binary compatibility
-//#ifdef INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR
-// Trace((stderr, "inflate.c too far\n"));
-// copy -= state.whave;
-// if (copy > state.length) { copy = state.length; }
-// if (copy > left) { copy = left; }
-// left -= copy;
-// state.length -= copy;
-// do {
-// output[put++] = 0;
-// } while (--copy);
-// if (state.length === 0) { state.mode = LEN; }
-// break;
-//#endif
- }
- if (copy > state.wnext) {
- copy -= state.wnext;
- from = state.wsize - copy;
- }
- else {
- from = state.wnext - copy;
- }
- if (copy > state.length) { copy = state.length; }
- from_source = state.window;
- }
- else { /* copy from output */
- from_source = output;
- from = put - state.offset;
- copy = state.length;
- }
- if (copy > left) { copy = left; }
- left -= copy;
- state.length -= copy;
- do {
- output[put++] = from_source[from++];
- } while (--copy);
- if (state.length === 0) { state.mode = LEN; }
- break;
- case LIT:
- if (left === 0) { break inf_leave; }
- output[put++] = state.length;
- left--;
- state.mode = LEN;
- break;
- case CHECK:
- if (state.wrap) {
- //=== NEEDBITS(32);
- while (bits < 32) {
- if (have === 0) { break inf_leave; }
- have--;
- // Use '|' instead of '+' to make sure that result is signed
- hold |= input[next++] << bits;
- bits += 8;
- }
- //===//
- _out -= left;
- strm.total_out += _out;
- state.total += _out;
- if ((state.wrap & 4) && _out) {
- strm.adler = state.check =
- /*UPDATE_CHECK(state.check, put - _out, _out);*/
- (state.flags ? crc32(state.check, output, _out, put - _out) : adler32(state.check, output, _out, put - _out));
-
- }
- _out = left;
- // NB: crc32 stored as signed 32-bit int, zswap32 returns signed too
- if ((state.wrap & 4) && (state.flags ? hold : zswap32(hold)) !== state.check) {
- strm.msg = 'incorrect data check';
- state.mode = BAD;
- break;
- }
- //=== INITBITS();
- hold = 0;
- bits = 0;
- //===//
- //Tracev((stderr, "inflate: check matches trailer\n"));
- }
- state.mode = LENGTH;
- /* falls through */
- case LENGTH:
- if (state.wrap && state.flags) {
- //=== NEEDBITS(32);
- while (bits < 32) {
- if (have === 0) { break inf_leave; }
- have--;
- hold += input[next++] << bits;
- bits += 8;
- }
- //===//
- if ((state.wrap & 4) && hold !== (state.total & 0xffffffff)) {
- strm.msg = 'incorrect length check';
- state.mode = BAD;
- break;
- }
- //=== INITBITS();
- hold = 0;
- bits = 0;
- //===//
- //Tracev((stderr, "inflate: length matches trailer\n"));
- }
- state.mode = DONE;
- /* falls through */
- case DONE:
- ret = Z_STREAM_END;
- break inf_leave;
- case BAD:
- ret = Z_DATA_ERROR;
- break inf_leave;
- case MEM:
- return Z_MEM_ERROR;
- case SYNC:
- /* falls through */
- default:
- return Z_STREAM_ERROR;
- }
- }
-
- // inf_leave <- here is real place for "goto inf_leave", emulated via "break inf_leave"
-
- /*
- Return from inflate(), updating the total counts and the check value.
- If there was no progress during the inflate() call, return a buffer
- error. Call updatewindow() to create and/or update the window state.
- Note: a memory error from inflate() is non-recoverable.
- */
-
- //--- RESTORE() ---
- strm.next_out = put;
- strm.avail_out = left;
- strm.next_in = next;
- strm.avail_in = have;
- state.hold = hold;
- state.bits = bits;
- //---
-
- if (state.wsize || (_out !== strm.avail_out && state.mode < BAD &&
- (state.mode < CHECK || flush !== Z_FINISH))) {
- if (updatewindow(strm, strm.output, strm.next_out, _out - strm.avail_out)) {
- state.mode = MEM;
- return Z_MEM_ERROR;
- }
- }
- _in -= strm.avail_in;
- _out -= strm.avail_out;
- strm.total_in += _in;
- strm.total_out += _out;
- state.total += _out;
- if ((state.wrap & 4) && _out) {
- strm.adler = state.check = /*UPDATE_CHECK(state.check, strm.next_out - _out, _out);*/
- (state.flags ? crc32(state.check, output, _out, strm.next_out - _out) : adler32(state.check, output, _out, strm.next_out - _out));
- }
- strm.data_type = state.bits + (state.last ? 64 : 0) +
- (state.mode === TYPE ? 128 : 0) +
- (state.mode === LEN_ || state.mode === COPY_ ? 256 : 0);
- if (((_in === 0 && _out === 0) || flush === Z_FINISH) && ret === Z_OK) {
- ret = Z_BUF_ERROR;
- }
- return ret;
-};
-
-
-const inflateEnd = (strm) => {
-
- if (inflateStateCheck(strm)) {
- return Z_STREAM_ERROR;
- }
-
- let state = strm.state;
- if (state.window) {
- state.window = null;
- }
- strm.state = null;
- return Z_OK;
-};
-
-
-const inflateGetHeader = (strm, head) => {
-
- /* check state */
- if (inflateStateCheck(strm)) { return Z_STREAM_ERROR; }
- const state = strm.state;
- if ((state.wrap & 2) === 0) { return Z_STREAM_ERROR; }
-
- /* save header structure */
- state.head = head;
- head.done = false;
- return Z_OK;
-};
-
-
-const inflateSetDictionary = (strm, dictionary) => {
- const dictLength = dictionary.length;
-
- let state;
- let dictid;
- let ret;
-
- /* check state */
- if (inflateStateCheck(strm)) { return Z_STREAM_ERROR; }
- state = strm.state;
-
- if (state.wrap !== 0 && state.mode !== DICT) {
- return Z_STREAM_ERROR;
- }
-
- /* check for correct dictionary identifier */
- if (state.mode === DICT) {
- dictid = 1; /* adler32(0, null, 0)*/
- /* dictid = adler32(dictid, dictionary, dictLength); */
- dictid = adler32(dictid, dictionary, dictLength, 0);
- if (dictid !== state.check) {
- return Z_DATA_ERROR;
- }
- }
- /* copy dictionary to window using updatewindow(), which will amend the
- existing dictionary if appropriate */
- ret = updatewindow(strm, dictionary, dictLength, dictLength);
- if (ret) {
- state.mode = MEM;
- return Z_MEM_ERROR;
- }
- state.havedict = 1;
- // Tracev((stderr, "inflate: dictionary set\n"));
- return Z_OK;
-};
-var inflateInfo = 'pako inflate (from Nodeca project)';
-export {inflateReset,inflateReset2,inflateResetKeep,inflateInit,inflateInit2,inflate,inflateEnd,inflateGetHeader,inflateSetDictionary,inflateInfo }
-
-
-
-/* Not implemented
-module.exports.inflateCodesUsed = inflateCodesUsed;
-module.exports.inflateCopy = inflateCopy;
-module.exports.inflateGetDictionary = inflateGetDictionary;
-module.exports.inflateMark = inflateMark;
-module.exports.inflatePrime = inflatePrime;
-module.exports.inflateSync = inflateSync;
-module.exports.inflateSyncPoint = inflateSyncPoint;
-module.exports.inflateUndermine = inflateUndermine;
-module.exports.inflateValidate = inflateValidate;
-*/
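
A minimal sketch (not part of this patch) of how decompression can be done once the vendored `zlib/inflate.js` above is gone, assuming the ohpm-resolved `pako` package exposes the standard pako high-level API (`pako.inflate`), which wraps the `inflateInit2`/`inflate`/`inflateEnd` calls removed here:

```typescript
// Hedged sketch: consume pako from the package manager instead of the
// deleted 3rd_party sources. Assumes the standard pako API is available.
import pako from 'pako';

function inflateToBytes(compressed: Uint8Array): Uint8Array {
  // Throws on data errors instead of returning Z_DATA_ERROR-style codes.
  return pako.inflate(compressed);
}

function inflateToString(compressed: Uint8Array): string {
  // The { to: 'string' } option asks pako to decode the output as UTF-8.
  return pako.inflate(compressed, { to: 'string' });
}
```
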
diff --git a/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/inftrees.js b/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/inftrees.js
deleted file mode 100644
index 429306c..0000000
--- a/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/inftrees.js
+++ /dev/null
@@ -1,340 +0,0 @@
-'use strict';
-
-// (C) 1995-2013 Jean-loup Gailly and Mark Adler
-// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin
-//
-// This software is provided 'as-is', without any express or implied
-// warranty. In no event will the authors be held liable for any damages
-// arising from the use of this software.
-//
-// Permission is granted to anyone to use this software for any purpose,
-// including commercial applications, and to alter it and redistribute it
-// freely, subject to the following restrictions:
-//
-// 1. The origin of this software must not be misrepresented; you must not
-// claim that you wrote the original software. If you use this software
-// in a product, an acknowledgment in the product documentation would be
-// appreciated but is not required.
-// 2. Altered source versions must be plainly marked as such, and must not be
-// misrepresented as being the original software.
-// 3. This notice may not be removed or altered from any source distribution.
-
-const MAXBITS = 15;
-const ENOUGH_LENS = 852;
-const ENOUGH_DISTS = 592;
-//const ENOUGH = (ENOUGH_LENS+ENOUGH_DISTS);
-
-const CODES = 0;
-const LENS = 1;
-const DISTS = 2;
-
-const lbase = new Uint16Array([ /* Length codes 257..285 base */
- 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,
- 35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0
-]);
-
-const lext = new Uint8Array([ /* Length codes 257..285 extra */
- 16, 16, 16, 16, 16, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 18,
- 19, 19, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 16, 72, 78
-]);
-
-const dbase = new Uint16Array([ /* Distance codes 0..29 base */
- 1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,
- 257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,
- 8193, 12289, 16385, 24577, 0, 0
-]);
-
-const dext = new Uint8Array([ /* Distance codes 0..29 extra */
- 16, 16, 16, 16, 17, 17, 18, 18, 19, 19, 20, 20, 21, 21, 22, 22,
- 23, 23, 24, 24, 25, 25, 26, 26, 27, 27,
- 28, 28, 29, 29, 64, 64
-]);
-
-const inflate_table = (type, lens, lens_index, codes, table, table_index, work, opts) =>
-{
- const bits = opts.bits;
- //here = opts.here; /* table entry for duplication */
-
- let len = 0; /* a code's length in bits */
- let sym = 0; /* index of code symbols */
- let min = 0, max = 0; /* minimum and maximum code lengths */
- let root = 0; /* number of index bits for root table */
- let curr = 0; /* number of index bits for current table */
- let drop = 0; /* code bits to drop for sub-table */
- let left = 0; /* number of prefix codes available */
- let used = 0; /* code entries in table used */
- let huff = 0; /* Huffman code */
- let incr; /* for incrementing code, index */
- let fill; /* index for replicating entries */
- let low; /* low bits for current root entry */
- let mask; /* mask for low root bits */
- let next; /* next available space in table */
- let base = null; /* base value table to use */
-// let shoextra; /* extra bits table to use */
- let match; /* use base and extra for symbol >= match */
- const count = new Uint16Array(MAXBITS + 1); //[MAXBITS+1]; /* number of codes of each length */
- const offs = new Uint16Array(MAXBITS + 1); //[MAXBITS+1]; /* offsets in table for each length */
- let extra = null;
-
- let here_bits, here_op, here_val;
-
- /*
- Process a set of code lengths to create a canonical Huffman code. The
- code lengths are lens[0..codes-1]. Each length corresponds to the
- symbols 0..codes-1. The Huffman code is generated by first sorting the
- symbols by length from short to long, and retaining the symbol order
- for codes with equal lengths. Then the code starts with all zero bits
- for the first code of the shortest length, and the codes are integer
- increments for the same length, and zeros are appended as the length
- increases. For the deflate format, these bits are stored backwards
- from their more natural integer increment ordering, and so when the
- decoding tables are built in the large loop below, the integer codes
- are incremented backwards.
-
- This routine assumes, but does not check, that all of the entries in
- lens[] are in the range 0..MAXBITS. The caller must assure this.
- 1..MAXBITS is interpreted as that code length. zero means that that
- symbol does not occur in this code.
-
- The codes are sorted by computing a count of codes for each length,
- creating from that a table of starting indices for each length in the
- sorted table, and then entering the symbols in order in the sorted
- table. The sorted table is work[], with that space being provided by
- the caller.
-
- The length counts are used for other purposes as well, i.e. finding
- the minimum and maximum length codes, determining if there are any
- codes at all, checking for a valid set of lengths, and looking ahead
- at length counts to determine sub-table sizes when building the
- decoding tables.
- */
-
- /* accumulate lengths for codes (assumes lens[] all in 0..MAXBITS) */
- for (len = 0; len <= MAXBITS; len++) {
- count[len] = 0;
- }
- for (sym = 0; sym < codes; sym++) {
- count[lens[lens_index + sym]]++;
- }
-
- /* bound code lengths, force root to be within code lengths */
- root = bits;
- for (max = MAXBITS; max >= 1; max--) {
- if (count[max] !== 0) { break; }
- }
- if (root > max) {
- root = max;
- }
- if (max === 0) { /* no symbols to code at all */
- //table.op[opts.table_index] = 64; //here.op = (var char)64; /* invalid code marker */
- //table.bits[opts.table_index] = 1; //here.bits = (var char)1;
- //table.val[opts.table_index++] = 0; //here.val = (var short)0;
- table[table_index++] = (1 << 24) | (64 << 16) | 0;
-
-
- //table.op[opts.table_index] = 64;
- //table.bits[opts.table_index] = 1;
- //table.val[opts.table_index++] = 0;
- table[table_index++] = (1 << 24) | (64 << 16) | 0;
-
- opts.bits = 1;
- return 0; /* no symbols, but wait for decoding to report error */
- }
- for (min = 1; min < max; min++) {
- if (count[min] !== 0) { break; }
- }
- if (root < min) {
- root = min;
- }
-
- /* check for an over-subscribed or incomplete set of lengths */
- left = 1;
- for (len = 1; len <= MAXBITS; len++) {
- left <<= 1;
- left -= count[len];
- if (left < 0) {
- return -1;
- } /* over-subscribed */
- }
- if (left > 0 && (type === CODES || max !== 1)) {
- return -1; /* incomplete set */
- }
-
- /* generate offsets into symbol table for each length for sorting */
- offs[1] = 0;
- for (len = 1; len < MAXBITS; len++) {
- offs[len + 1] = offs[len] + count[len];
- }
-
- /* sort symbols by length, by symbol order within each length */
- for (sym = 0; sym < codes; sym++) {
- if (lens[lens_index + sym] !== 0) {
- work[offs[lens[lens_index + sym]]++] = sym;
- }
- }
-
- /*
- Create and fill in decoding tables. In this loop, the table being
- filled is at next and has curr index bits. The code being used is huff
- with length len. That code is converted to an index by dropping drop
- bits off of the bottom. For codes where len is less than drop + curr,
- those top drop + curr - len bits are incremented through all values to
- fill the table with replicated entries.
-
- root is the number of index bits for the root table. When len exceeds
- root, sub-tables are created pointed to by the root entry with an index
- of the low root bits of huff. This is saved in low to check for when a
- new sub-table should be started. drop is zero when the root table is
- being filled, and drop is root when sub-tables are being filled.
-
- When a new sub-table is needed, it is necessary to look ahead in the
- code lengths to determine what size sub-table is needed. The length
- counts are used for this, and so count[] is decremented as codes are
- entered in the tables.
-
- used keeps track of how many table entries have been allocated from the
- provided *table space. It is checked for LENS and DIST tables against
- the constants ENOUGH_LENS and ENOUGH_DISTS to guard against changes in
- the initial root table size constants. See the comments in inftrees.h
- for more information.
-
- sym increments through all symbols, and the loop terminates when
- all codes of length max, i.e. all codes, have been processed. This
- routine permits incomplete codes, so another loop after this one fills
- in the rest of the decoding tables with invalid code markers.
- */
-
- /* set up for code type */
- // poor man optimization - use if-else instead of switch,
- // to avoid deopts in old v8
- if (type === CODES) {
- base = extra = work; /* dummy value--not used */
- match = 20;
-
- } else if (type === LENS) {
- base = lbase;
- extra = lext;
- match = 257;
-
- } else { /* DISTS */
- base = dbase;
- extra = dext;
- match = 0;
- }
-
- /* initialize opts for loop */
- huff = 0; /* starting code */
- sym = 0; /* starting code symbol */
- len = min; /* starting code length */
- next = table_index; /* current table to fill in */
- curr = root; /* current table index bits */
- drop = 0; /* current bits to drop from code for index */
- low = -1; /* trigger new sub-table when len > root */
- used = 1 << root; /* use root table entries */
- mask = used - 1; /* mask for comparing low */
-
- /* check available table space */
- if ((type === LENS && used > ENOUGH_LENS) ||
- (type === DISTS && used > ENOUGH_DISTS)) {
- return 1;
- }
-
- /* process all codes and make table entries */
- for (;;) {
- /* create table entry */
- here_bits = len - drop;
- if (work[sym] + 1 < match) {
- here_op = 0;
- here_val = work[sym];
- }
- else if (work[sym] >= match) {
- here_op = extra[work[sym] - match];
- here_val = base[work[sym] - match];
- }
- else {
- here_op = 32 + 64; /* end of block */
- here_val = 0;
- }
-
- /* replicate for those indices with low len bits equal to huff */
- incr = 1 << (len - drop);
- fill = 1 << curr;
- min = fill; /* save offset to next table */
- do {
- fill -= incr;
- table[next + (huff >> drop) + fill] = (here_bits << 24) | (here_op << 16) | here_val |0;
- } while (fill !== 0);
-
- /* backwards increment the len-bit code huff */
- incr = 1 << (len - 1);
- while (huff & incr) {
- incr >>= 1;
- }
- if (incr !== 0) {
- huff &= incr - 1;
- huff += incr;
- } else {
- huff = 0;
- }
-
- /* go to next symbol, update count, len */
- sym++;
- if (--count[len] === 0) {
- if (len === max) { break; }
- len = lens[lens_index + work[sym]];
- }
-
- /* create new sub-table if needed */
- if (len > root && (huff & mask) !== low) {
- /* if first time, transition to sub-tables */
- if (drop === 0) {
- drop = root;
- }
-
- /* increment past last table */
- next += min; /* here min is 1 << curr */
-
- /* determine length of next table */
- curr = len - drop;
- left = 1 << curr;
- while (curr + drop < max) {
- left -= count[curr + drop];
- if (left <= 0) { break; }
- curr++;
- left <<= 1;
- }
-
- /* check for enough space */
- used += 1 << curr;
- if ((type === LENS && used > ENOUGH_LENS) ||
- (type === DISTS && used > ENOUGH_DISTS)) {
- return 1;
- }
-
- /* point entry in root table to sub-table */
- low = huff & mask;
- /*table.op[low] = curr;
- table.bits[low] = root;
- table.val[low] = next - opts.table_index;*/
- table[low] = (root << 24) | (curr << 16) | (next - table_index) |0;
- }
- }
-
- /* fill in remaining table entry if code is incomplete (guaranteed to have
- at most one remaining entry, since if the code is incomplete, the
- maximum code length that was allowed to get this far is one bit) */
- if (huff !== 0) {
- //table.op[next + huff] = 64; /* invalid code marker */
- //table.bits[next + huff] = len - drop;
- //table.val[next + huff] = 0;
- table[next + huff] = ((len - drop) << 24) | (64 << 16) |0;
- }
-
- /* set return parameters */
- //opts.table_index += used;
- opts.bits = root;
- return 0;
-};
-
-
-export {inflate_table};
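
For orientation only: `inflate_table` above packs every decoding-table entry as a single 32-bit integer, `(here_bits << 24) | (here_op << 16) | here_val`, and the inflate state machine deleted earlier unpacks it with `>>> 24`, `(>>> 16) & 0xff`, and `& 0xffff`. A small illustrative helper (hypothetical, not part of the library) showing that unpacking:

```typescript
// Illustration of the packed table-entry layout used by inflate_table.
function unpackTableEntry(here: number): { bits: number; op: number; val: number } {
  const bits = here >>> 24;        // number of code bits this entry consumes
  const op = (here >>> 16) & 0xff; // operation field (literal, extra bits, end-of-block, sub-table link)
  const val = here & 0xffff;       // literal byte, base value, or sub-table offset
  return { bits, op, val };
}
```
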
diff --git a/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/messages.js b/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/messages.js
deleted file mode 100644
index da236e9..0000000
--- a/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/messages.js
+++ /dev/null
@@ -1,32 +0,0 @@
-'use strict';
-
-// (C) 1995-2013 Jean-loup Gailly and Mark Adler
-// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin
-//
-// This software is provided 'as-is', without any express or implied
-// warranty. In no event will the authors be held liable for any damages
-// arising from the use of this software.
-//
-// Permission is granted to anyone to use this software for any purpose,
-// including commercial applications, and to alter it and redistribute it
-// freely, subject to the following restrictions:
-//
-// 1. The origin of this software must not be misrepresented; you must not
-// claim that you wrote the original software. If you use this software
-// in a product, an acknowledgment in the product documentation would be
-// appreciated but is not required.
-// 2. Altered source versions must be plainly marked as such, and must not be
-// misrepresented as being the original software.
-// 3. This notice may not be removed or altered from any source distribution.
-
-export default {
- 2: 'need dictionary', /* Z_NEED_DICT 2 */
- 1: 'stream end', /* Z_STREAM_END 1 */
- 0: '', /* Z_OK 0 */
- '-1': 'file error', /* Z_ERRNO (-1) */
- '-2': 'stream error', /* Z_STREAM_ERROR (-2) */
- '-3': 'data error', /* Z_DATA_ERROR (-3) */
- '-4': 'insufficient memory', /* Z_MEM_ERROR (-4) */
- '-5': 'buffer error', /* Z_BUF_ERROR (-5) */
- '-6': 'incompatible version' /* Z_VERSION_ERROR (-6) */
-};
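
The strings in `messages.js` above are what pako surfaces through a stream's `msg` field when decompression fails. A hedged sketch, assuming the ohpm pako package exposes the standard streaming `Inflate` class:

```typescript
// Sketch: reading the error code and message text after a failed inflate.
import pako from 'pako';

const inflator = new pako.Inflate();
// A zlib header followed by truncated data; push(..., true) marks the final chunk.
inflator.push(new Uint8Array([0x78, 0x9c, 0x00]), true);
if (inflator.err !== 0) {
  // err is the Z_* status code; msg is the text from messages.js (e.g. 'data error').
  console.error(`inflate failed: ${inflator.err} ${inflator.msg}`);
}
```
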
diff --git a/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/trees.js b/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/trees.js
deleted file mode 100644
index 4aea90d..0000000
--- a/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/trees.js
+++ /dev/null
@@ -1,1179 +0,0 @@
-'use strict';
-
-// (C) 1995-2013 Jean-loup Gailly and Mark Adler
-// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin
-//
-// This software is provided 'as-is', without any express or implied
-// warranty. In no event will the authors be held liable for any damages
-// arising from the use of this software.
-//
-// Permission is granted to anyone to use this software for any purpose,
-// including commercial applications, and to alter it and redistribute it
-// freely, subject to the following restrictions:
-//
-// 1. The origin of this software must not be misrepresented; you must not
-// claim that you wrote the original software. If you use this software
-// in a product, an acknowledgment in the product documentation would be
-// appreciated but is not required.
-// 2. Altered source versions must be plainly marked as such, and must not be
-// misrepresented as being the original software.
-// 3. This notice may not be removed or altered from any source distribution.
-
-/* eslint-disable space-unary-ops */
-
-/* Public constants ==========================================================*/
-/* ===========================================================================*/
-
-
-//const Z_FILTERED = 1;
-//const Z_HUFFMAN_ONLY = 2;
-//const Z_RLE = 3;
-const Z_FIXED = 4;
-//const Z_DEFAULT_STRATEGY = 0;
-
-/* Possible values of the data_type field (though see inflate()) */
-const Z_BINARY = 0;
-const Z_TEXT = 1;
-//const Z_ASCII = 1; // = Z_TEXT
-const Z_UNKNOWN = 2;
-
-/*============================================================================*/
-
-
-function zero(buf) { let len = buf.length; while (--len >= 0) { buf[len] = 0; } }
-
-// From zutil.h
-
-const STORED_BLOCK = 0;
-const STATIC_TREES = 1;
-const DYN_TREES = 2;
-/* The three kinds of block type */
-
-const MIN_MATCH = 3;
-const MAX_MATCH = 258;
-/* The minimum and maximum match lengths */
-
-// From deflate.h
-/* ===========================================================================
- * Internal compression state.
- */
-
-const LENGTH_CODES = 29;
-/* number of length codes, not counting the special END_BLOCK code */
-
-const LITERALS = 256;
-/* number of literal bytes 0..255 */
-
-const L_CODES = LITERALS + 1 + LENGTH_CODES;
-/* number of Literal or Length codes, including the END_BLOCK code */
-
-const D_CODES = 30;
-/* number of distance codes */
-
-const BL_CODES = 19;
-/* number of codes used to transfer the bit lengths */
-
-const HEAP_SIZE = 2 * L_CODES + 1;
-/* maximum heap size */
-
-const MAX_BITS = 15;
-/* All codes must not exceed MAX_BITS bits */
-
-const Buf_size = 16;
-/* size of bit buffer in bi_buf */
-
-
-/* ===========================================================================
- * Constants
- */
-
-const MAX_BL_BITS = 7;
-/* Bit length codes must not exceed MAX_BL_BITS bits */
-
-const END_BLOCK = 256;
-/* end of block literal code */
-
-const REP_3_6 = 16;
-/* repeat previous bit length 3-6 times (2 bits of repeat count) */
-
-const REPZ_3_10 = 17;
-/* repeat a zero length 3-10 times (3 bits of repeat count) */
-
-const REPZ_11_138 = 18;
-/* repeat a zero length 11-138 times (7 bits of repeat count) */
-
-/* eslint-disable comma-spacing,array-bracket-spacing */
-const extra_lbits = /* extra bits for each length code */
- new Uint8Array([0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0]);
-
-const extra_dbits = /* extra bits for each distance code */
- new Uint8Array([0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13]);
-
-const extra_blbits = /* extra bits for each bit length code */
- new Uint8Array([0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7]);
-
-const bl_order =
- new Uint8Array([16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15]);
-/* eslint-enable comma-spacing,array-bracket-spacing */
-
-/* The lengths of the bit length codes are sent in order of decreasing
- * probability, to avoid transmitting the lengths for unused bit length codes.
- */
-
-/* ===========================================================================
- * Local data. These are initialized only once.
- */
-
-// We pre-fill arrays with 0 to avoid uninitialized gaps
-
-const DIST_CODE_LEN = 512; /* see definition of array dist_code below */
-
-// !!!! Use flat array instead of structure, Freq = i*2, Len = i*2+1
-const static_ltree = new Array((L_CODES + 2) * 2);
-zero(static_ltree);
-/* The static literal tree. Since the bit lengths are imposed, there is no
- * need for the L_CODES extra codes used during heap construction. However
- * The codes 286 and 287 are needed to build a canonical tree (see _tr_init
- * below).
- */
-
-const static_dtree = new Array(D_CODES * 2);
-zero(static_dtree);
-/* The static distance tree. (Actually a trivial tree since all codes use
- * 5 bits.)
- */
-
-const _dist_code = new Array(DIST_CODE_LEN);
-zero(_dist_code);
-/* Distance codes. The first 256 values correspond to the distances
- * 3 .. 258, the last 256 values correspond to the top 8 bits of
- * the 15 bit distances.
- */
-
-const _length_code = new Array(MAX_MATCH - MIN_MATCH + 1);
-zero(_length_code);
-/* length code for each normalized match length (0 == MIN_MATCH) */
-
-const base_length = new Array(LENGTH_CODES);
-zero(base_length);
-/* First normalized length for each code (0 = MIN_MATCH) */
-
-const base_dist = new Array(D_CODES);
-zero(base_dist);
-/* First normalized distance for each code (0 = distance of 1) */
-
-
-function StaticTreeDesc(static_tree, extra_bits, extra_base, elems, max_length) {
-
- this.static_tree = static_tree; /* static tree or NULL */
- this.extra_bits = extra_bits; /* extra bits for each code or NULL */
- this.extra_base = extra_base; /* base index for extra_bits */
- this.elems = elems; /* max number of elements in the tree */
- this.max_length = max_length; /* max bit length for the codes */
-
- // show if `static_tree` has data or dummy - needed for monomorphic objects
- this.has_stree = static_tree && static_tree.length;
-}
-
-
-let static_l_desc;
-let static_d_desc;
-let static_bl_desc;
-
-
-function TreeDesc(dyn_tree, stat_desc) {
- this.dyn_tree = dyn_tree; /* the dynamic tree */
- this.max_code = 0; /* largest code with non zero frequency */
- this.stat_desc = stat_desc; /* the corresponding static tree */
-}
-
-
-
-const d_code = (dist) => {
-
- return dist < 256 ? _dist_code[dist] : _dist_code[256 + (dist >>> 7)];
-};
-
-
-/* ===========================================================================
- * Output a short LSB first on the stream.
- * IN assertion: there is enough room in pendingBuf.
- */
-const put_short = (s, w) => {
-// put_byte(s, (uch)((w) & 0xff));
-// put_byte(s, (uch)((ush)(w) >> 8));
- s.pending_buf[s.pending++] = (w) & 0xff;
- s.pending_buf[s.pending++] = (w >>> 8) & 0xff;
-};
-
-
-/* ===========================================================================
- * Send a value on a given number of bits.
- * IN assertion: length <= 16 and value fits in length bits.
- */
-const send_bits = (s, value, length) => {
-
- if (s.bi_valid > (Buf_size - length)) {
- s.bi_buf |= (value << s.bi_valid) & 0xffff;
- put_short(s, s.bi_buf);
- s.bi_buf = value >> (Buf_size - s.bi_valid);
- s.bi_valid += length - Buf_size;
- } else {
- s.bi_buf |= (value << s.bi_valid) & 0xffff;
- s.bi_valid += length;
- }
-};
-
-
-const send_code = (s, c, tree) => {
-
- send_bits(s, tree[c * 2]/*.Code*/, tree[c * 2 + 1]/*.Len*/);
-};
-
-
-/* ===========================================================================
- * Reverse the first len bits of a code, using straightforward code (a faster
- * method would use a table)
- * IN assertion: 1 <= len <= 15
- */
-const bi_reverse = (code, len) => {
-
- let res = 0;
- do {
- res |= code & 1;
- code >>>= 1;
- res <<= 1;
- } while (--len > 0);
- return res >>> 1;
-};
-
-
-/* ===========================================================================
- * Flush the bit buffer, keeping at most 7 bits in it.
- */
-const bi_flush = (s) => {
-
- if (s.bi_valid === 16) {
- put_short(s, s.bi_buf);
- s.bi_buf = 0;
- s.bi_valid = 0;
-
- } else if (s.bi_valid >= 8) {
- s.pending_buf[s.pending++] = s.bi_buf & 0xff;
- s.bi_buf >>= 8;
- s.bi_valid -= 8;
- }
-};
-
-
-/* ===========================================================================
- * Compute the optimal bit lengths for a tree and update the total bit length
- * for the current block.
- * IN assertion: the fields freq and dad are set, heap[heap_max] and
- * above are the tree nodes sorted by increasing frequency.
- * OUT assertions: the field len is set to the optimal bit length, the
- * array bl_count contains the frequencies for each bit length.
- * The length opt_len is updated; static_len is also updated if stree is
- * not null.
- */
-const gen_bitlen = (s, desc) => {
-// deflate_state *s;
-// tree_desc *desc; /* the tree descriptor */
-
- const tree = desc.dyn_tree;
- const max_code = desc.max_code;
- const stree = desc.stat_desc.static_tree;
- const has_stree = desc.stat_desc.has_stree;
- const extra = desc.stat_desc.extra_bits;
- const base = desc.stat_desc.extra_base;
- const max_length = desc.stat_desc.max_length;
- let h; /* heap index */
- let n, m; /* iterate over the tree elements */
- let bits; /* bit length */
- let xbits; /* extra bits */
- let f; /* frequency */
- let overflow = 0; /* number of elements with bit length too large */
-
- for (bits = 0; bits <= MAX_BITS; bits++) {
- s.bl_count[bits] = 0;
- }
-
- /* In a first pass, compute the optimal bit lengths (which may
- * overflow in the case of the bit length tree).
- */
- tree[s.heap[s.heap_max] * 2 + 1]/*.Len*/ = 0; /* root of the heap */
-
- for (h = s.heap_max + 1; h < HEAP_SIZE; h++) {
- n = s.heap[h];
- bits = tree[tree[n * 2 + 1]/*.Dad*/ * 2 + 1]/*.Len*/ + 1;
- if (bits > max_length) {
- bits = max_length;
- overflow++;
- }
- tree[n * 2 + 1]/*.Len*/ = bits;
- /* We overwrite tree[n].Dad which is no longer needed */
-
- if (n > max_code) { continue; } /* not a leaf node */
-
- s.bl_count[bits]++;
- xbits = 0;
- if (n >= base) {
- xbits = extra[n - base];
- }
- f = tree[n * 2]/*.Freq*/;
- s.opt_len += f * (bits + xbits);
- if (has_stree) {
- s.static_len += f * (stree[n * 2 + 1]/*.Len*/ + xbits);
- }
- }
- if (overflow === 0) { return; }
-
- // Tracev((stderr,"\nbit length overflow\n"));
- /* This happens for example on obj2 and pic of the Calgary corpus */
-
- /* Find the first bit length which could increase: */
- do {
- bits = max_length - 1;
- while (s.bl_count[bits] === 0) { bits--; }
- s.bl_count[bits]--; /* move one leaf down the tree */
- s.bl_count[bits + 1] += 2; /* move one overflow item as its brother */
- s.bl_count[max_length]--;
- /* The brother of the overflow item also moves one step up,
- * but this does not affect bl_count[max_length]
- */
- overflow -= 2;
- } while (overflow > 0);
-
- /* Now recompute all bit lengths, scanning in increasing frequency.
- * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
- * lengths instead of fixing only the wrong ones. This idea is taken
- * from 'ar' written by Haruhiko Okumura.)
- */
- for (bits = max_length; bits !== 0; bits--) {
- n = s.bl_count[bits];
- while (n !== 0) {
- m = s.heap[--h];
- if (m > max_code) { continue; }
- if (tree[m * 2 + 1]/*.Len*/ !== bits) {
- // Tracev((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits));
- s.opt_len += (bits - tree[m * 2 + 1]/*.Len*/) * tree[m * 2]/*.Freq*/;
- tree[m * 2 + 1]/*.Len*/ = bits;
- }
- n--;
- }
- }
-};
-
-
-/* ===========================================================================
- * Generate the codes for a given tree and bit counts (which need not be
- * optimal).
- * IN assertion: the array bl_count contains the bit length statistics for
- * the given tree and the field len is set for all tree elements.
- * OUT assertion: the field code is set for all tree elements of non
- * zero code length.
- */
-const gen_codes = (tree, max_code, bl_count) => {
-// ct_data *tree; /* the tree to decorate */
-// int max_code; /* largest code with non zero frequency */
-// ushf *bl_count; /* number of codes at each bit length */
-
- const next_code = new Array(MAX_BITS + 1); /* next code value for each bit length */
- let code = 0; /* running code value */
- let bits; /* bit index */
- let n; /* code index */
-
- /* The distribution counts are first used to generate the code values
- * without bit reversal.
- */
- for (bits = 1; bits <= MAX_BITS; bits++) {
- code = (code + bl_count[bits - 1]) << 1;
- next_code[bits] = code;
- }
- /* Check that the bit counts in bl_count are consistent. The last code
- * must be all ones.
- */
-  //Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1, "inconsistent bit counts");
-  //Tracev((stderr,"\ngen_codes: max_code %d ", max_code));
-
-  for (n = 0; n <= max_code; n++) {
-    let len = tree[n * 2 + 1]/*.Len*/;
-    if (len === 0) { continue; }
-    /* Now reverse the bits */
-    tree[n * 2]/*.Code*/ = bi_reverse(next_code[len]++, len);
-  }
-};
-
-
-/* ===========================================================================
- * Initialize the various 'constant' tables.
- */
-const tr_static_init = () => {
-
- let n; /* iterates over tree elements */
- let bits; /* bit counter */
- let length; /* length value */
- let code; /* code value */
- let dist; /* distance index */
- const bl_count = new Array(MAX_BITS + 1);
- /* number of codes at each bit length for an optimal tree */
-
- // do check in _tr_init()
- //if (static_init_done) return;
-
- /* For some embedded targets, global variables are not initialized: */
-/*#ifdef NO_INIT_GLOBAL_POINTERS
- static_l_desc.static_tree = static_ltree;
- static_l_desc.extra_bits = extra_lbits;
- static_d_desc.static_tree = static_dtree;
- static_d_desc.extra_bits = extra_dbits;
- static_bl_desc.extra_bits = extra_blbits;
-#endif*/
-
- /* Initialize the mapping length (0..255) -> length code (0..28) */
- length = 0;
- for (code = 0; code < LENGTH_CODES - 1; code++) {
- base_length[code] = length;
- for (n = 0; n < (1 << extra_lbits[code]); n++) {
- _length_code[length++] = code;
- }
- }
- //Assert (length == 256, "tr_static_init: length != 256");
- /* Note that the length 255 (match length 258) can be represented
- * in two different ways: code 284 + 5 bits or code 285, so we
- * overwrite length_code[255] to use the best encoding:
- */
- _length_code[length - 1] = code;
-
- /* Initialize the mapping dist (0..32K) -> dist code (0..29) */
- dist = 0;
- for (code = 0; code < 16; code++) {
- base_dist[code] = dist;
- for (n = 0; n < (1 << extra_dbits[code]); n++) {
- _dist_code[dist++] = code;
- }
- }
- //Assert (dist == 256, "tr_static_init: dist != 256");
- dist >>= 7; /* from now on, all distances are divided by 128 */
- for (; code < D_CODES; code++) {
- base_dist[code] = dist << 7;
- for (n = 0; n < (1 << (extra_dbits[code] - 7)); n++) {
- _dist_code[256 + dist++] = code;
- }
- }
- //Assert (dist == 256, "tr_static_init: 256+dist != 512");
-
- /* Construct the codes of the static literal tree */
- for (bits = 0; bits <= MAX_BITS; bits++) {
- bl_count[bits] = 0;
- }
-
- n = 0;
- while (n <= 143) {
- static_ltree[n * 2 + 1]/*.Len*/ = 8;
- n++;
- bl_count[8]++;
- }
- while (n <= 255) {
- static_ltree[n * 2 + 1]/*.Len*/ = 9;
- n++;
- bl_count[9]++;
- }
- while (n <= 279) {
- static_ltree[n * 2 + 1]/*.Len*/ = 7;
- n++;
- bl_count[7]++;
- }
- while (n <= 287) {
- static_ltree[n * 2 + 1]/*.Len*/ = 8;
- n++;
- bl_count[8]++;
- }
- /* Codes 286 and 287 do not exist, but we must include them in the
- * tree construction to get a canonical Huffman tree (longest code
- * all ones)
- */
- gen_codes(static_ltree, L_CODES + 1, bl_count);
-
- /* The static distance tree is trivial: */
- for (n = 0; n < D_CODES; n++) {
- static_dtree[n * 2 + 1]/*.Len*/ = 5;
- static_dtree[n * 2]/*.Code*/ = bi_reverse(n, 5);
- }
-
- // Now data ready and we can init static trees
- static_l_desc = new StaticTreeDesc(static_ltree, extra_lbits, LITERALS + 1, L_CODES, MAX_BITS);
- static_d_desc = new StaticTreeDesc(static_dtree, extra_dbits, 0, D_CODES, MAX_BITS);
- static_bl_desc = new StaticTreeDesc(new Array(0), extra_blbits, 0, BL_CODES, MAX_BL_BITS);
-
- //static_init_done = true;
-};
-
-
-/* ===========================================================================
- * Initialize a new block.
- */
-const init_block = (s) => {
-
- let n; /* iterates over tree elements */
-
- /* Initialize the trees. */
- for (n = 0; n < L_CODES; n++) { s.dyn_ltree[n * 2]/*.Freq*/ = 0; }
- for (n = 0; n < D_CODES; n++) { s.dyn_dtree[n * 2]/*.Freq*/ = 0; }
- for (n = 0; n < BL_CODES; n++) { s.bl_tree[n * 2]/*.Freq*/ = 0; }
-
- s.dyn_ltree[END_BLOCK * 2]/*.Freq*/ = 1;
- s.opt_len = s.static_len = 0;
- s.sym_next = s.matches = 0;
-};
-
-
-/* ===========================================================================
- * Flush the bit buffer and align the output on a byte boundary
- */
-const bi_windup = (s) =>
-{
- if (s.bi_valid > 8) {
- put_short(s, s.bi_buf);
- } else if (s.bi_valid > 0) {
- //put_byte(s, (Byte)s->bi_buf);
- s.pending_buf[s.pending++] = s.bi_buf;
- }
- s.bi_buf = 0;
- s.bi_valid = 0;
-};
-
-/* ===========================================================================
- * Compares to subtrees, using the tree depth as tie breaker when
- * the subtrees have equal frequency. This minimizes the worst case length.
- */
-const smaller = (tree, n, m, depth) => {
-
- const _n2 = n * 2;
- const _m2 = m * 2;
- return (tree[_n2]/*.Freq*/ < tree[_m2]/*.Freq*/ ||
- (tree[_n2]/*.Freq*/ === tree[_m2]/*.Freq*/ && depth[n] <= depth[m]));
-};
-
-/* ===========================================================================
- * Restore the heap property by moving down the tree starting at node k,
- * exchanging a node with the smallest of its two sons if necessary, stopping
- * when the heap property is re-established (each father smaller than its
- * two sons).
- */
-const pqdownheap = (s, tree, k) => {
-// deflate_state *s;
-// ct_data *tree; /* the tree to restore */
-// int k; /* node to move down */
-
- const v = s.heap[k];
- let j = k << 1; /* left son of k */
- while (j <= s.heap_len) {
- /* Set j to the smallest of the two sons: */
- if (j < s.heap_len &&
- smaller(tree, s.heap[j + 1], s.heap[j], s.depth)) {
- j++;
- }
- /* Exit if v is smaller than both sons */
- if (smaller(tree, v, s.heap[j], s.depth)) { break; }
-
- /* Exchange v with the smallest son */
- s.heap[k] = s.heap[j];
- k = j;
-
- /* And continue down the tree, setting j to the left son of k */
- j <<= 1;
- }
- s.heap[k] = v;
-};
-
-
-// inlined manually
-// const SMALLEST = 1;
-
-/* ===========================================================================
- * Send the block data compressed using the given Huffman trees
- */
-const compress_block = (s, ltree, dtree) => {
-// deflate_state *s;
-// const ct_data *ltree; /* literal tree */
-// const ct_data *dtree; /* distance tree */
-
- let dist; /* distance of matched string */
- let lc; /* match length or unmatched char (if dist == 0) */
- let sx = 0; /* running index in sym_buf */
- let code; /* the code to send */
- let extra; /* number of extra bits to send */
-
- if (s.sym_next !== 0) {
- do {
- dist = s.pending_buf[s.sym_buf + sx++] & 0xff;
- dist += (s.pending_buf[s.sym_buf + sx++] & 0xff) << 8;
- lc = s.pending_buf[s.sym_buf + sx++];
- if (dist === 0) {
- send_code(s, lc, ltree); /* send a literal byte */
- //Tracecv(isgraph(lc), (stderr," '%c' ", lc));
- } else {
- /* Here, lc is the match length - MIN_MATCH */
- code = _length_code[lc];
- send_code(s, code + LITERALS + 1, ltree); /* send the length code */
- extra = extra_lbits[code];
- if (extra !== 0) {
- lc -= base_length[code];
- send_bits(s, lc, extra); /* send the extra length bits */
- }
- dist--; /* dist is now the match distance - 1 */
- code = d_code(dist);
- //Assert (code < D_CODES, "bad d_code");
-
- send_code(s, code, dtree); /* send the distance code */
- extra = extra_dbits[code];
- if (extra !== 0) {
- dist -= base_dist[code];
- send_bits(s, dist, extra); /* send the extra distance bits */
- }
- } /* literal or match pair ? */
-
- /* Check that the overlay between pending_buf and sym_buf is ok: */
- //Assert(s->pending < s->lit_bufsize + sx, "pendingBuf overflow");
-
- } while (sx < s.sym_next);
- }
-
- send_code(s, END_BLOCK, ltree);
-};
-
-
-/* ===========================================================================
- * Construct one Huffman tree and assigns the code bit strings and lengths.
- * Update the total bit length for the current block.
- * IN assertion: the field freq is set for all tree elements.
- * OUT assertions: the fields len and code are set to the optimal bit length
- * and corresponding code. The length opt_len is updated; static_len is
- * also updated if stree is not null. The field max_code is set.
- */
-const build_tree = (s, desc) => {
-// deflate_state *s;
-// tree_desc *desc; /* the tree descriptor */
-
- const tree = desc.dyn_tree;
- const stree = desc.stat_desc.static_tree;
- const has_stree = desc.stat_desc.has_stree;
- const elems = desc.stat_desc.elems;
- let n, m; /* iterate over heap elements */
- let max_code = -1; /* largest code with non zero frequency */
- let node; /* new node being created */
-
- /* Construct the initial heap, with least frequent element in
- * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
- * heap[0] is not used.
- */
- s.heap_len = 0;
- s.heap_max = HEAP_SIZE;
-
- for (n = 0; n < elems; n++) {
- if (tree[n * 2]/*.Freq*/ !== 0) {
- s.heap[++s.heap_len] = max_code = n;
- s.depth[n] = 0;
-
- } else {
- tree[n * 2 + 1]/*.Len*/ = 0;
- }
- }
-
- /* The pkzip format requires that at least one distance code exists,
- * and that at least one bit should be sent even if there is only one
- * possible code. So to avoid special checks later on we force at least
- * two codes of non zero frequency.
- */
- while (s.heap_len < 2) {
- node = s.heap[++s.heap_len] = (max_code < 2 ? ++max_code : 0);
- tree[node * 2]/*.Freq*/ = 1;
- s.depth[node] = 0;
- s.opt_len--;
-
- if (has_stree) {
- s.static_len -= stree[node * 2 + 1]/*.Len*/;
- }
- /* node is 0 or 1 so it does not have extra bits */
- }
- desc.max_code = max_code;
-
- /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
- * establish sub-heaps of increasing lengths:
- */
- for (n = (s.heap_len >> 1/*int /2*/); n >= 1; n--) { pqdownheap(s, tree, n); }
-
- /* Construct the Huffman tree by repeatedly combining the least two
- * frequent nodes.
- */
- node = elems; /* next internal node of the tree */
- do {
- //pqremove(s, tree, n); /* n = node of least frequency */
- /*** pqremove ***/
- n = s.heap[1/*SMALLEST*/];
- s.heap[1/*SMALLEST*/] = s.heap[s.heap_len--];
- pqdownheap(s, tree, 1/*SMALLEST*/);
- /***/
-
- m = s.heap[1/*SMALLEST*/]; /* m = node of next least frequency */
-
- s.heap[--s.heap_max] = n; /* keep the nodes sorted by frequency */
- s.heap[--s.heap_max] = m;
-
- /* Create a new node father of n and m */
- tree[node * 2]/*.Freq*/ = tree[n * 2]/*.Freq*/ + tree[m * 2]/*.Freq*/;
- s.depth[node] = (s.depth[n] >= s.depth[m] ? s.depth[n] : s.depth[m]) + 1;
- tree[n * 2 + 1]/*.Dad*/ = tree[m * 2 + 1]/*.Dad*/ = node;
-
- /* and insert the new node in the heap */
- s.heap[1/*SMALLEST*/] = node++;
- pqdownheap(s, tree, 1/*SMALLEST*/);
-
- } while (s.heap_len >= 2);
-
- s.heap[--s.heap_max] = s.heap[1/*SMALLEST*/];
-
- /* At this point, the fields freq and dad are set. We can now
- * generate the bit lengths.
- */
- gen_bitlen(s, desc);
-
- /* The field len is now set, we can generate the bit codes */
- gen_codes(tree, max_code, s.bl_count);
-};
-
-
-/* ===========================================================================
- * Scan a literal or distance tree to determine the frequencies of the codes
- * in the bit length tree.
- */
-const scan_tree = (s, tree, max_code) => {
-// deflate_state *s;
-// ct_data *tree; /* the tree to be scanned */
-// int max_code; /* and its largest code of non zero frequency */
-
- let n; /* iterates over all tree elements */
- let prevlen = -1; /* last emitted length */
- let curlen; /* length of current code */
-
- let nextlen = tree[0 * 2 + 1]/*.Len*/; /* length of next code */
-
- let count = 0; /* repeat count of the current code */
- let max_count = 7; /* max repeat count */
- let min_count = 4; /* min repeat count */
-
- if (nextlen === 0) {
- max_count = 138;
- min_count = 3;
- }
- tree[(max_code + 1) * 2 + 1]/*.Len*/ = 0xffff; /* guard */
-
- for (n = 0; n <= max_code; n++) {
- curlen = nextlen;
- nextlen = tree[(n + 1) * 2 + 1]/*.Len*/;
-
- if (++count < max_count && curlen === nextlen) {
- continue;
-
- } else if (count < min_count) {
- s.bl_tree[curlen * 2]/*.Freq*/ += count;
-
- } else if (curlen !== 0) {
-
- if (curlen !== prevlen) { s.bl_tree[curlen * 2]/*.Freq*/++; }
- s.bl_tree[REP_3_6 * 2]/*.Freq*/++;
-
- } else if (count <= 10) {
- s.bl_tree[REPZ_3_10 * 2]/*.Freq*/++;
-
- } else {
- s.bl_tree[REPZ_11_138 * 2]/*.Freq*/++;
- }
-
- count = 0;
- prevlen = curlen;
-
- if (nextlen === 0) {
- max_count = 138;
- min_count = 3;
-
- } else if (curlen === nextlen) {
- max_count = 6;
- min_count = 3;
-
- } else {
- max_count = 7;
- min_count = 4;
- }
- }
-};
-
-
-/* ===========================================================================
- * Send a literal or distance tree in compressed form, using the codes in
- * bl_tree.
- */
-const send_tree = (s, tree, max_code) => {
-// deflate_state *s;
-// ct_data *tree; /* the tree to be scanned */
-// int max_code; /* and its largest code of non zero frequency */
-
- let n; /* iterates over all tree elements */
- let prevlen = -1; /* last emitted length */
- let curlen; /* length of current code */
-
- let nextlen = tree[0 * 2 + 1]/*.Len*/; /* length of next code */
-
- let count = 0; /* repeat count of the current code */
- let max_count = 7; /* max repeat count */
- let min_count = 4; /* min repeat count */
-
- /* tree[max_code+1].Len = -1; */ /* guard already set */
- if (nextlen === 0) {
- max_count = 138;
- min_count = 3;
- }
-
- for (n = 0; n <= max_code; n++) {
- curlen = nextlen;
- nextlen = tree[(n + 1) * 2 + 1]/*.Len*/;
-
- if (++count < max_count && curlen === nextlen) {
- continue;
-
- } else if (count < min_count) {
- do { send_code(s, curlen, s.bl_tree); } while (--count !== 0);
-
- } else if (curlen !== 0) {
- if (curlen !== prevlen) {
- send_code(s, curlen, s.bl_tree);
- count--;
- }
- //Assert(count >= 3 && count <= 6, " 3_6?");
- send_code(s, REP_3_6, s.bl_tree);
- send_bits(s, count - 3, 2);
-
- } else if (count <= 10) {
- send_code(s, REPZ_3_10, s.bl_tree);
- send_bits(s, count - 3, 3);
-
- } else {
- send_code(s, REPZ_11_138, s.bl_tree);
- send_bits(s, count - 11, 7);
- }
-
- count = 0;
- prevlen = curlen;
- if (nextlen === 0) {
- max_count = 138;
- min_count = 3;
-
- } else if (curlen === nextlen) {
- max_count = 6;
- min_count = 3;
-
- } else {
- max_count = 7;
- min_count = 4;
- }
- }
-};
-
-
-/* ===========================================================================
- * Construct the Huffman tree for the bit lengths and return the index in
- * bl_order of the last bit length code to send.
- */
-const build_bl_tree = (s) => {
-
- let max_blindex; /* index of last bit length code of non zero freq */
-
- /* Determine the bit length frequencies for literal and distance trees */
- scan_tree(s, s.dyn_ltree, s.l_desc.max_code);
- scan_tree(s, s.dyn_dtree, s.d_desc.max_code);
-
- /* Build the bit length tree: */
- build_tree(s, s.bl_desc);
- /* opt_len now includes the length of the tree representations, except
- * the lengths of the bit lengths codes and the 5+5+4 bits for the counts.
- */
-
- /* Determine the number of bit length codes to send. The pkzip format
- * requires that at least 4 bit length codes be sent. (appnote.txt says
- * 3 but the actual value used is 4.)
- */
- for (max_blindex = BL_CODES - 1; max_blindex >= 3; max_blindex--) {
- if (s.bl_tree[bl_order[max_blindex] * 2 + 1]/*.Len*/ !== 0) {
- break;
- }
- }
- /* Update opt_len to include the bit length tree and counts */
- s.opt_len += 3 * (max_blindex + 1) + 5 + 5 + 4;
- //Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld",
- // s->opt_len, s->static_len));
-
- return max_blindex;
-};
-
-
-/* ===========================================================================
- * Send the header for a block using dynamic Huffman trees: the counts, the
- * lengths of the bit length codes, the literal tree and the distance tree.
- * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
- */
-const send_all_trees = (s, lcodes, dcodes, blcodes) => {
-// deflate_state *s;
-// int lcodes, dcodes, blcodes; /* number of codes for each tree */
-
- let rank; /* index in bl_order */
-
- //Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes");
- //Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,
- // "too many codes");
- //Tracev((stderr, "\nbl counts: "));
- send_bits(s, lcodes - 257, 5); /* not +255 as stated in appnote.txt */
- send_bits(s, dcodes - 1, 5);
- send_bits(s, blcodes - 4, 4); /* not -3 as stated in appnote.txt */
- for (rank = 0; rank < blcodes; rank++) {
- //Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
- send_bits(s, s.bl_tree[bl_order[rank] * 2 + 1]/*.Len*/, 3);
- }
- //Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent));
-
- send_tree(s, s.dyn_ltree, lcodes - 1); /* literal tree */
- //Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent));
-
- send_tree(s, s.dyn_dtree, dcodes - 1); /* distance tree */
- //Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent));
-};
-
-
-/* ===========================================================================
- * Check if the data type is TEXT or BINARY, using the following algorithm:
- * - TEXT if the two conditions below are satisfied:
- * a) There are no non-portable control characters belonging to the
- * "block list" (0..6, 14..25, 28..31).
- * b) There is at least one printable character belonging to the
- * "allow list" (9 {TAB}, 10 {LF}, 13 {CR}, 32..255).
- * - BINARY otherwise.
- * - The following partially-portable control characters form a
- * "gray list" that is ignored in this detection algorithm:
- * (7 {BEL}, 8 {BS}, 11 {VT}, 12 {FF}, 26 {SUB}, 27 {ESC}).
- * IN assertion: the fields Freq of dyn_ltree are set.
- */
-const detect_data_type = (s) => {
- /* block_mask is the bit mask of block-listed bytes
- * set bits 0..6, 14..25, and 28..31
- * 0xf3ffc07f = binary 11110011111111111100000001111111
- */
- let block_mask = 0xf3ffc07f;
- let n;
-
- /* Check for non-textual ("block-listed") bytes. */
- for (n = 0; n <= 31; n++, block_mask >>>= 1) {
- if ((block_mask & 1) && (s.dyn_ltree[n * 2]/*.Freq*/ !== 0)) {
- return Z_BINARY;
- }
- }
-
- /* Check for textual ("allow-listed") bytes. */
- if (s.dyn_ltree[9 * 2]/*.Freq*/ !== 0 || s.dyn_ltree[10 * 2]/*.Freq*/ !== 0 ||
- s.dyn_ltree[13 * 2]/*.Freq*/ !== 0) {
- return Z_TEXT;
- }
- for (n = 32; n < LITERALS; n++) {
- if (s.dyn_ltree[n * 2]/*.Freq*/ !== 0) {
- return Z_TEXT;
- }
- }
-
- /* There are no "block-listed" or "allow-listed" bytes:
- * this stream either is empty or has tolerated ("gray-listed") bytes only.
- */
- return Z_BINARY;
-};
-
-
-let static_init_done = false;
-
-/* ===========================================================================
- * Initialize the tree data structures for a new zlib stream.
- */
-const _tr_init = (s) =>
-{
-
- if (!static_init_done) {
- tr_static_init();
- static_init_done = true;
- }
-
- s.l_desc = new TreeDesc(s.dyn_ltree, static_l_desc);
- s.d_desc = new TreeDesc(s.dyn_dtree, static_d_desc);
- s.bl_desc = new TreeDesc(s.bl_tree, static_bl_desc);
-
- s.bi_buf = 0;
- s.bi_valid = 0;
-
- /* Initialize the first block of the first file: */
- init_block(s);
-};
-
-
-/* ===========================================================================
- * Send a stored block
- */
-const _tr_stored_block = (s, buf, stored_len, last) => {
-//DeflateState *s;
-//charf *buf; /* input block */
-//ulg stored_len; /* length of input block */
-//int last; /* one if this is the last block for a file */
-
- send_bits(s, (STORED_BLOCK << 1) + (last ? 1 : 0), 3); /* send block type */
- bi_windup(s); /* align on byte boundary */
- put_short(s, stored_len);
- put_short(s, ~stored_len);
- if (stored_len) {
- s.pending_buf.set(s.window.subarray(buf, buf + stored_len), s.pending);
- }
- s.pending += stored_len;
-};
-
-
-/* ===========================================================================
- * Send one empty static block to give enough lookahead for inflate.
- * This takes 10 bits, of which 7 may remain in the bit buffer.
- */
-const _tr_align = (s) => {
- send_bits(s, STATIC_TREES << 1, 3);
- send_code(s, END_BLOCK, static_ltree);
- bi_flush(s);
-};
-
-
-/* ===========================================================================
- * Determine the best encoding for the current block: dynamic trees, static
- * trees or store, and write out the encoded block.
- */
-const _tr_flush_block = (s, buf, stored_len, last) => {
-//DeflateState *s;
-//charf *buf; /* input block, or NULL if too old */
-//ulg stored_len; /* length of input block */
-//int last; /* one if this is the last block for a file */
-
- let opt_lenb, static_lenb; /* opt_len and static_len in bytes */
- let max_blindex = 0; /* index of last bit length code of non zero freq */
-
- /* Build the Huffman trees unless a stored block is forced */
- if (s.level > 0) {
-
- /* Check if the file is binary or text */
- if (s.strm.data_type === Z_UNKNOWN) {
- s.strm.data_type = detect_data_type(s);
- }
-
- /* Construct the literal and distance trees */
- build_tree(s, s.l_desc);
- // Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len,
- // s->static_len));
-
- build_tree(s, s.d_desc);
- // Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len,
- // s->static_len));
- /* At this point, opt_len and static_len are the total bit lengths of
- * the compressed block data, excluding the tree representations.
- */
-
- /* Build the bit length tree for the above two trees, and get the index
- * in bl_order of the last bit length code to send.
- */
- max_blindex = build_bl_tree(s);
-
- /* Determine the best encoding. Compute the block lengths in bytes. */
- opt_lenb = (s.opt_len + 3 + 7) >>> 3;
- static_lenb = (s.static_len + 3 + 7) >>> 3;
-
- // Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ",
- // opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len,
- // s->sym_next / 3));
-
- if (static_lenb <= opt_lenb) { opt_lenb = static_lenb; }
-
- } else {
- // Assert(buf != (char*)0, "lost buf");
- opt_lenb = static_lenb = stored_len + 5; /* force a stored block */
- }
-
- if ((stored_len + 4 <= opt_lenb) && (buf !== -1)) {
- /* 4: two words for the lengths */
-
- /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
- * Otherwise we can't have processed more than WSIZE input bytes since
- * the last block flush, because compression would have been
- * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
- * transform a block into a stored block.
- */
- _tr_stored_block(s, buf, stored_len, last);
-
- } else if (s.strategy === Z_FIXED || static_lenb === opt_lenb) {
-
- send_bits(s, (STATIC_TREES << 1) + (last ? 1 : 0), 3);
- compress_block(s, static_ltree, static_dtree);
-
- } else {
- send_bits(s, (DYN_TREES << 1) + (last ? 1 : 0), 3);
- send_all_trees(s, s.l_desc.max_code + 1, s.d_desc.max_code + 1, max_blindex + 1);
- compress_block(s, s.dyn_ltree, s.dyn_dtree);
- }
- // Assert (s->compressed_len == s->bits_sent, "bad compressed size");
- /* The above check is made mod 2^32, for files larger than 512 MB
- * and uLong implemented on 32 bits.
- */
- init_block(s);
-
- if (last) {
- bi_windup(s);
- }
- // Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3,
- // s->compressed_len-7*last));
-};
-
-/* ===========================================================================
- * Save the match info and tally the frequency counts. Return true if
- * the current block must be flushed.
- */
-const _tr_tally = (s, dist, lc) => {
-// deflate_state *s;
-// unsigned dist; /* distance of matched string */
-// unsigned lc; /* match length-MIN_MATCH or unmatched char (if dist==0) */
-
- s.pending_buf[s.sym_buf + s.sym_next++] = dist;
- s.pending_buf[s.sym_buf + s.sym_next++] = dist >> 8;
- s.pending_buf[s.sym_buf + s.sym_next++] = lc;
- if (dist === 0) {
- /* lc is the unmatched char */
- s.dyn_ltree[lc * 2]/*.Freq*/++;
- } else {
- s.matches++;
- /* Here, lc is the match length - MIN_MATCH */
- dist--; /* dist = match distance - 1 */
- //Assert((ush)dist < (ush)MAX_DIST(s) &&
- // (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) &&
- // (ush)d_code(dist) < (ush)D_CODES, "_tr_tally: bad match");
-
- s.dyn_ltree[(_length_code[lc] + LITERALS + 1) * 2]/*.Freq*/++;
- s.dyn_dtree[d_code(dist) * 2]/*.Freq*/++;
- }
-
- return (s.sym_next === s.sym_end);
-};
-
-export {
- _tr_init,_tr_stored_block,_tr_flush_block,_tr_tally,_tr_align
-}
-
-
diff --git a/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/zstream.js b/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/zstream.js
deleted file mode 100644
index 1672f2a..0000000
--- a/imageknife/src/main/ets/components/3rd_party/pako/lib/zlib/zstream.js
+++ /dev/null
@@ -1,47 +0,0 @@
-'use strict';
-
-// (C) 1995-2013 Jean-loup Gailly and Mark Adler
-// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin
-//
-// This software is provided 'as-is', without any express or implied
-// warranty. In no event will the authors be held liable for any damages
-// arising from the use of this software.
-//
-// Permission is granted to anyone to use this software for any purpose,
-// including commercial applications, and to alter it and redistribute it
-// freely, subject to the following restrictions:
-//
-// 1. The origin of this software must not be misrepresented; you must not
-// claim that you wrote the original software. If you use this software
-// in a product, an acknowledgment in the product documentation would be
-// appreciated but is not required.
-// 2. Altered source versions must be plainly marked as such, and must not be
-// misrepresented as being the original software.
-// 3. This notice may not be removed or altered from any source distribution.
-
-function ZStream() {
- /* next input byte */
- this.input = null; // JS specific, because we have no pointers
- this.next_in = 0;
- /* number of bytes available at input */
- this.avail_in = 0;
- /* total number of input bytes read so far */
- this.total_in = 0;
- /* next output byte should be put there */
- this.output = null; // JS specific, because we have no pointers
- this.next_out = 0;
- /* remaining free space at output */
- this.avail_out = 0;
- /* total number of bytes output so far */
- this.total_out = 0;
- /* last error message, NULL if no error */
- this.msg = ''/*Z_NULL*/;
- /* not visible by applications */
- this.state = null;
- /* best guess about the data type: binary or text */
- this.data_type = 2/*Z_UNKNOWN*/;
- /* adler32 value of the uncompressed data */
- this.adler = 0;
-}
-
-export {ZStream};
diff --git a/imageknife/src/main/ets/components/3rd_party/upng/UPNG.js b/imageknife/src/main/ets/components/3rd_party/upng/UPNG.js
index bfb54f2..acb04a0 100644
--- a/imageknife/src/main/ets/components/3rd_party/upng/UPNG.js
+++ b/imageknife/src/main/ets/components/3rd_party/upng/UPNG.js
@@ -20,8 +20,7 @@
*LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
*OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
*SOFTWARE.*/
-import {deflate} from '../pako/lib/deflate'
-var pako = {deflate}
+import pako from 'pako'
const UZIP = null;
export {
diff --git a/imageknife/src/main/ets/components/imageknife/ImageKnifeComponent.ets b/imageknife/src/main/ets/components/imageknife/ImageKnifeComponent.ets
index 37d80b7..7f1a64c 100644
--- a/imageknife/src/main/ets/components/imageknife/ImageKnifeComponent.ets
+++ b/imageknife/src/main/ets/components/imageknife/ImageKnifeComponent.ets
@@ -22,6 +22,7 @@ import { GIFFrame } from '../imageknife/utils/gif/GIFFrame'
import { IDrawLifeCycle } from '../imageknife/interface/IDrawLifeCycle'
import { LogUtil } from '../imageknife/utils/LogUtil'
import { BusinessError } from '@ohos.base'
+import common from '@ohos.app.ability.common'
@Component
export struct ImageKnifeComponent {
@@ -260,6 +261,14 @@ export struct ImageKnifeComponent {
}
}
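+ // Prefer the context supplied on ImageKnifeOption (needed when the component runs inside an HSP shared library); otherwise fall back to this component's own ability context.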
+ configHspContext(request: RequestOption) {
+   if (this.imageKnifeOption.context != undefined) {
+     request.setModuleContext(this.imageKnifeOption.context)
+   } else {
+     request.setModuleContext(getContext(this) as common.UIAbilityContext)
+   }
+ }
+
configRenderGpu(request: RequestOption) {
if (this.imageKnifeOption.enableGpu) {
request.enableGPU()
@@ -285,6 +294,7 @@ export struct ImageKnifeComponent {
this.configNecessary(request);
this.configCacheStrategy(request);
this.configDisplay(request);
+ this.configHspContext(request);
this.configRenderGpu(request);
if(ImageKnifeGlobal.getInstance().getImageKnife()!=undefined) {
ImageKnifeGlobal.getInstance().getImageKnife()?.call(request);
diff --git a/imageknife/src/main/ets/components/imageknife/ImageKnifeOption.ets b/imageknife/src/main/ets/components/imageknife/ImageKnifeOption.ets
index 2180386..0c4e00d 100644
--- a/imageknife/src/main/ets/components/imageknife/ImageKnifeOption.ets
+++ b/imageknife/src/main/ets/components/imageknife/ImageKnifeOption.ets
@@ -25,6 +25,7 @@ import { ScaleType } from '../imageknife/ImageKnifeComponent'
import { rgbColor } from './transform/CropCircleWithBorderTransformation'
import { RoundCorner } from './transform/RoundedCornersTransformation'
import { ObjectKey } from './ObjectKey'
+import common from '@ohos.app.ability.common'
export interface CropCircleWithBorder{
border: number,
@@ -125,7 +126,7 @@ export class ImageKnifeOption {
allCacheInfoCallback?: IAllCacheInfoCallback;
-
+ context?: common.UIAbilityContext;
// sizeAnimate?: AnimateParam 由业务自定义不再支持
constructor() {
diff --git a/imageknife/src/main/ets/components/imageknife/RequestOption.ets b/imageknife/src/main/ets/components/imageknife/RequestOption.ets
index d7e7c3f..78ef169 100644
--- a/imageknife/src/main/ets/components/imageknife/RequestOption.ets
+++ b/imageknife/src/main/ets/components/imageknife/RequestOption.ets
@@ -49,6 +49,7 @@ import { LogUtil } from '../imageknife/utils/LogUtil'
import { ImageKnifeGlobal } from './ImageKnifeGlobal'
import { BusinessError } from '@ohos.base'
import { ObjectKey } from './ObjectKey'
+import common from '@ohos.app.ability.common'
export interface Size {
width: number,
@@ -130,12 +131,19 @@ export class RequestOption {
}
}
}
-
+ // Loading a module's Resource assets requires that module's own context
+ moduleContext?: common.UIAbilityContext = undefined;
constructor() {
// 初始化全局监听
this.requestListeners = new Array();
// 初始化唯一标识,可以用这个标识找到ImageKnife中的对象
this.uuid = this.generateUUID();
+
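+    // Default to the application context registered with ImageKnifeGlobal; HSP callers override this via setModuleContext().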
+ let ctx = ImageKnifeGlobal.getInstance().getHapContext() as common.UIAbilityContext
+    if (ctx != undefined) {
+      this.moduleContext = ctx;
+    }
}
generateUUID(): string {
@@ -147,7 +155,13 @@ export class RequestOption {
});
return uuid;
}
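+  // In HSP scenarios the caller must pass the shared library's UIAbilityContext here so that Resource assets and downloads resolve against the correct module.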
+  setModuleContext(moduleCtx: common.UIAbilityContext) {
+    this.moduleContext = moduleCtx;
+  }
+  getModuleContext(): common.UIAbilityContext | undefined {
+    return this.moduleContext;
+  }
/**
* set image Component size
*/
diff --git a/imageknife/src/main/ets/components/imageknife/holder/ErrorHolderManager.ets b/imageknife/src/main/ets/components/imageknife/holder/ErrorHolderManager.ets
index 9732a7b..be06dec 100644
--- a/imageknife/src/main/ets/components/imageknife/holder/ErrorHolderManager.ets
+++ b/imageknife/src/main/ets/components/imageknife/holder/ErrorHolderManager.ets
@@ -77,7 +77,12 @@ export class ErrorHolderManager {
onError("ErrorHolderManager 文件类型为null,请检查数据源arraybuffer")
}
}
- resourceFetch.loadResource(res, suc, onError)
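+ // The error placeholder Resource is now resolved through the request's module context so it is read from the correct module.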
+ let ctx = this.options.getModuleContext();
+ if (ctx != undefined) {
+   resourceFetch.loadResource(ctx, res, suc, onError)
+ } else {
+   onError("ErrorHolderManager moduleContext is undefined, please check it!")
+ }
} else {
onError("ErrorHolderManager 输入参数有问题!")
}
diff --git a/imageknife/src/main/ets/components/imageknife/holder/PlaceHolderManager.ets b/imageknife/src/main/ets/components/imageknife/holder/PlaceHolderManager.ets
index 612a10f..b9c7404 100644
--- a/imageknife/src/main/ets/components/imageknife/holder/PlaceHolderManager.ets
+++ b/imageknife/src/main/ets/components/imageknife/holder/PlaceHolderManager.ets
@@ -73,7 +73,12 @@ export class PlaceHolderManager {
break;
}
}
- resourceFetch.loadResource(res, suc, onError)
+ let ctx = this.options.getModuleContext();
+ if (ctx != undefined) {
+   resourceFetch.loadResource(ctx, res, suc, onError)
+ } else {
+   onError("PlaceHolderManager moduleContext is undefined, please check it!")
+ }
} else {
onError("PlaceHolderManager 输入参数有问题!")
}
diff --git a/imageknife/src/main/ets/components/imageknife/holder/RetryHolderManager.ets b/imageknife/src/main/ets/components/imageknife/holder/RetryHolderManager.ets
index aaf2245..aad1ee4 100644
--- a/imageknife/src/main/ets/components/imageknife/holder/RetryHolderManager.ets
+++ b/imageknife/src/main/ets/components/imageknife/holder/RetryHolderManager.ets
@@ -73,9 +73,14 @@ export class RetryHolderManager {
break;
}
}
- resourceFetch.loadResource(res, suc, onError)
+ let ctx = this.options.getModuleContext();
+ if (ctx != undefined) {
+   resourceFetch.loadResource(ctx, res, suc, onError)
+ } else {
+   onError("RetryHolderManager moduleContext is undefined, please check it!")
+ }
} else {
- onError("PlaceHolderManager 输入参数有问题!")
+ onError("RetryHolderManager 输入参数有问题!")
}
}
}
diff --git a/imageknife/src/main/ets/components/imageknife/networkmanage/NetworkDownloadClient.ets b/imageknife/src/main/ets/components/imageknife/networkmanage/NetworkDownloadClient.ets
index 6733caa..ae7d9c5 100644
--- a/imageknife/src/main/ets/components/imageknife/networkmanage/NetworkDownloadClient.ets
+++ b/imageknife/src/main/ets/components/imageknife/networkmanage/NetworkDownloadClient.ets
@@ -43,7 +43,7 @@ export class NetworkDownloadClient implements IDataFetch {
enableMetered: true,
};
- loadRequest.downloadFile( (ImageKnifeGlobal.getInstance().getHapContext() as common.BaseContext ), downloadConfig).then((downloadTask:loadRequest.DownloadTask) => {
+ loadRequest.downloadFile( (request.getModuleContext() as common.BaseContext ), downloadConfig).then((downloadTask:loadRequest.DownloadTask) => {
if (downloadTask) {
let loadTask:loadRequest.DownloadTask | null = downloadTask;
diff --git a/imageknife/src/main/ets/components/imageknife/requestmanage/RequestManager.ets b/imageknife/src/main/ets/components/imageknife/requestmanage/RequestManager.ets
index a7615ba..4e605f3 100644
--- a/imageknife/src/main/ets/components/imageknife/requestmanage/RequestManager.ets
+++ b/imageknife/src/main/ets/components/imageknife/requestmanage/RequestManager.ets
@@ -241,7 +241,13 @@ export class RequestManager {
}
}
}
- this.mIResourceFetch.loadResource(request.loadSrc as Resource, success, onError);
+ let ctx = request.getModuleContext();
+ if (ctx != undefined) {
+   this.mIResourceFetch.loadResource(ctx, request.loadSrc as Resource, success, onError);
+ } else {
+   onError('RequestManager loadSourceFormNative moduleContext is undefined, please check it!')
+ }
+
}
// 加载磁盘缓存 原图
private loadDiskFromSource(request: RequestOption, onComplete:(value:ImageKnifeData)=>void|PromiseLike, onError:(reason?:BusinessError|string)=>void) {
@@ -323,6 +329,13 @@ export class RequestManager {
width: Math.round(this.options.thumbSizeMultiplier * this.options.size.width),
height: Math.round(this.options.thumbSizeMultiplier * this.options.size.height)
})
+ let ctx = this.options.getModuleContext()
+ if (ctx != undefined) {
+   thumbOption.setModuleContext(ctx)
+ } else {
+   onError('RequestManager parseDiskFile2PixelMap moduleContext is undefined, please check it!')
+   return
+ }
let thumbCallback = this.options.thumbholderOnComplete;
let thumbError = this.options.thumbholderOnError;
this.options.transformations[0].transform(source, thumbOption,{asyncTransform: (error:BusinessError|string, pixelMap: PixelMap|null) => {
@@ -552,6 +565,13 @@ export class RequestManager {
thumbnailProcess(source:ArrayBuffer, filetype:string, onComplete:(value:ImageKnifeData)=>void|PromiseLike, onError:(reason?:BusinessError|string)=>void){
let thumbOption = new RequestOption();
+ let ctx = this.options.getModuleContext()
+ if (ctx != undefined) {
+   thumbOption.setModuleContext(ctx)
+ } else {
+   onError('RequestManager thumbnailProcess moduleContext is undefined, please check it!')
+   return
+ }
thumbOption.setImageViewSize({
width: Math.round(this.options.thumbSizeMultiplier * this.options.size.width),
height: Math.round(this.options.thumbSizeMultiplier * this.options.size.height)
diff --git a/imageknife/src/main/ets/components/imageknife/resourcemanage/IResourceFetch.ets b/imageknife/src/main/ets/components/imageknife/resourcemanage/IResourceFetch.ets
index 70057e5..1c65221 100644
--- a/imageknife/src/main/ets/components/imageknife/resourcemanage/IResourceFetch.ets
+++ b/imageknife/src/main/ets/components/imageknife/resourcemanage/IResourceFetch.ets
@@ -14,7 +14,9 @@
*/
import { BusinessError } from '@ohos.base'
+import common from '@ohos.app.ability.common';
+
// 本地资源解析抽象接口
export interface IResourceFetch<T> {
- loadResource: (res: Resource, onCompleteFunction: (value: T) => void | PromiseLike<T>, onErrorFunction: (reason?: BusinessError | string) => void) => void;
+ loadResource: (context: common.UIAbilityContext, res: Resource, onCompleteFunction: (value: T) => void | PromiseLike<T>, onErrorFunction: (reason?: BusinessError | string) => void) => void;
}
\ No newline at end of file
diff --git a/imageknife/src/main/ets/components/imageknife/resourcemanage/ParseResClient.ets b/imageknife/src/main/ets/components/imageknife/resourcemanage/ParseResClient.ets
index fd27e74..6faadc2 100644
--- a/imageknife/src/main/ets/components/imageknife/resourcemanage/ParseResClient.ets
+++ b/imageknife/src/main/ets/components/imageknife/resourcemanage/ParseResClient.ets
@@ -20,11 +20,11 @@ import { ImageKnifeGlobal } from '../ImageKnifeGlobal';
import { BusinessError } from '@ohos.base'
import common from '@ohos.app.ability.common';
export class ParseResClient implements IResourceFetch<ArrayBuffer> {
- loadResource(res: Resource, onCompleteFunction: (value: ArrayBuffer) => void | PromiseLike<ArrayBuffer>, onErrorFunction: (reason?: BusinessError | string) => void) {
+ loadResource(context: common.UIAbilityContext, res: Resource, onCompleteFunction: (value: ArrayBuffer) => void | PromiseLike<ArrayBuffer>, onErrorFunction: (reason?: BusinessError | string) => void) {
let resId = res.id;
let resType = res.type;
if (resType == ResourceTypeEts.MEDIA) {
- ((ImageKnifeGlobal.getInstance().getHapContext() as common.UIAbilityContext).resourceManager as resourceManager.ResourceManager)
+ (context.resourceManager as resourceManager.ResourceManager)
.getMediaContent(resId)
.then(data => {
let arrayBuffer = this.typedArrayToBuffer(data);
diff --git a/imageknife/src/main/ets/components/imageknife/resourcemanage/ParseResClientBase64.ets b/imageknife/src/main/ets/components/imageknife/resourcemanage/ParseResClientBase64.ets
index 6f563b5..983a982 100644
--- a/imageknife/src/main/ets/components/imageknife/resourcemanage/ParseResClientBase64.ets
+++ b/imageknife/src/main/ets/components/imageknife/resourcemanage/ParseResClientBase64.ets
@@ -19,14 +19,14 @@ import { Base64 } from '../../cache/Base64'
import { BusinessError } from '@ohos.base'
import resourceManager from '@ohos.resourceManager';
import { ImageKnifeGlobal } from '../ImageKnifeGlobal';
+import common from '@ohos.app.ability.common'
export class ParseResClientBase64 implements IResourceFetch<ArrayBuffer> {
- loadResource(res: Resource, onCompleteFunction: (value: ArrayBuffer) => void | PromiseLike<ArrayBuffer>, onErrorFunction: (reason?: BusinessError | string) => void) {
+ loadResource(context: common.UIAbilityContext, res: Resource, onCompleteFunction: (value: ArrayBuffer) => void | PromiseLike<ArrayBuffer>, onErrorFunction: (reason?: BusinessError | string) => void) {
let resId = res.id;
let resType = res.type;
if (resType == ResourceTypeEts.MEDIA) {
- ((ImageKnifeGlobal.getInstance()
- .getHapContext() as Record).resourceManager as resourceManager.ResourceManager)
+ (context.resourceManager as resourceManager.ResourceManager)
.getMediaContentBase64(resId)
.then(data => {
let matchReg = ';base64,';
diff --git a/imageknife/src/main/ets/components/imageknife/transform/MaskTransformation.ets b/imageknife/src/main/ets/components/imageknife/transform/MaskTransformation.ets
index d02ff33..e55fe35 100644
--- a/imageknife/src/main/ets/components/imageknife/transform/MaskTransformation.ets
+++ b/imageknife/src/main/ets/components/imageknife/transform/MaskTransformation.ets
@@ -78,7 +78,7 @@ export class MaskTransformation implements BaseTransform {
}
imageSource.createPixelMap(options)
.then(data => {
- this.openInternal(data, targetWidth, targetHeight, func)
+ this.openInternal(request, data, targetWidth, targetHeight, func)
imageSource.release()
})
.catch((e:BusinessError )=> {
@@ -87,13 +87,13 @@ export class MaskTransformation implements BaseTransform {
})
}
- private openInternal(bitmap: PixelMap, width: number, height: number, func?: AsyncTransform) {
+ private openInternal(request: RequestOption, bitmap: PixelMap, width: number, height: number, func?: AsyncTransform) {
if (!this._mResourceData) {
if(func != undefined){
func.asyncTransform("MaskTransformation resource is empty", null)
}
}
- let context = (ImageKnifeGlobal.getInstance().getHapContext() as common.UIAbilityContext)
+ let context = (request.getModuleContext() as common.UIAbilityContext)
if(context != undefined){
let resourceManager = context.resourceManager as resourceManager.ResourceManager
if(resourceManager != undefined && this._mResourceData != undefined)
diff --git a/imageknife/src/main/ets/components/imageknife/utils/gif/parse/GIFParse.ts b/imageknife/src/main/ets/components/imageknife/utils/gif/parse/GIFParse.ts
deleted file mode 100644
index 1883e30..0000000
--- a/imageknife/src/main/ets/components/imageknife/utils/gif/parse/GIFParse.ts
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Copyright (C) 2022 Huawei Device Co., Ltd.
- * Licensed under the Apache License, Version 2.0 (the 'License');
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an 'AS IS' BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import { decompressFrames, parseGIF } from './index'
-import { fixLoseGCE} from '../utils/ParseHelperUtils'
-import { GIFFrame } from '../GIFFrame'
-
-export function parseBufferToFrame(arraybuffer: ArrayBuffer): GIFFrame[] {
- let gif = parseGIF(arraybuffer)
- fixLoseGCE(gif)
- let origins = decompressFrames(gif, true)
- return origins as GIFFrame[];
-
-}
-
-
-
diff --git a/imageknife/src/main/ets/components/imageknife/utils/gif/parse/deinterlace.js b/imageknife/src/main/ets/components/imageknife/utils/gif/parse/deinterlace.js
deleted file mode 100644
index 1344785..0000000
--- a/imageknife/src/main/ets/components/imageknife/utils/gif/parse/deinterlace.js
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright (C) 2022 Huawei Device Co., Ltd.
- * Licensed under the Apache License, Version 2.0 (the 'License');
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an 'AS IS' BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Deinterlace function from https://github.com/shachaf/jsgif
- */
-export function deinterlace(pixels, width) {
- var newPixels = new Array(pixels.length);
- var rows = pixels.length / width;
-
- var cpRow = function cpRow(toRow, fromRow) {
- var fromPixels = pixels.slice(fromRow * width, (fromRow + 1) * width);
- newPixels.splice.apply(newPixels, [toRow * width, width].concat(fromPixels));
- }; // See appendix E.
-
-
- var offsets = [0, 4, 2, 1];
- var steps = [8, 8, 4, 2];
- var fromRow = 0;
-
- for (var pass = 0; pass < 4; pass++) {
- for (var toRow = offsets[pass]; toRow < rows; toRow += steps[pass]) {
- cpRow(toRow, fromRow);
- fromRow++;
- }
- }
- return newPixels;
-};
diff --git a/imageknife/src/main/ets/components/imageknife/utils/gif/parse/index.js b/imageknife/src/main/ets/components/imageknife/utils/gif/parse/index.js
deleted file mode 100644
index 1fc1e99..0000000
--- a/imageknife/src/main/ets/components/imageknife/utils/gif/parse/index.js
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * Copyright (C) 2022 Huawei Device Co., Ltd.
- * Licensed under the Apache License, Version 2.0 (the 'License');
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an 'AS IS' BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import { _default } from '../../../../3rd_party/jsbinaryschemaparser/lib/schemas/gif'
-import { conditional, loop, parse } from '../../../../3rd_party/jsbinaryschemaparser/lib/index'
-import {
- buildStream,
- peekByte,
- peekBytes,
- readArray,
- readBits,
- readByte,
- readBytes,
- readString,
- readUnsigned
-} from '../../../../3rd_party/jsbinaryschemaparser/lib/parsers/uint8'
-import { deinterlace } from './deinterlace'
-import { lzw } from './lzw'
-
-var _gif = _interopRequireDefault(_default);
-
-function _interopRequireDefault(obj) {
- return { "default": obj };
-}
-
-export function parseGIF(arrayBuffer) {
- var byteData = new Uint8Array(arrayBuffer);
- return (parse)((buildStream)(byteData), _gif["default"]);
-};
-
-
-export function generatePatch(image) {
- var totalPixels = image.pixels.length;
- var patchData = new Uint8ClampedArray(totalPixels * 4);
-
- for (var i = 0; i < totalPixels; i++) {
- var pos = i * 4;
- var colorIndex = image.pixels[i];
- var color = image.colorTable[colorIndex] || [0, 0, 0];
- patchData[pos] = color[2];
- patchData[pos + 1] = color[1];
- patchData[pos + 2] = color[0];
- patchData[pos + 3] = colorIndex !== image.transparentIndex ? 255 : 0;
- }
-
- return patchData;
-};
-
-export function decompressFrame(frame, gct, buildImagePatch) {
- if (!frame.image) {
-
- return;
- }
-
- var image = frame.image; // get the number of pixels
-
- var totalPixels = image.descriptor.width * image.descriptor.height; // do lzw decompression
-
- var pixels = lzw(image.data.minCodeSize, image.data.blocks, totalPixels); // deal with interlacing if necessary
-
- if (image.descriptor.lct.interlaced) {
- pixels = deinterlace(pixels, image.descriptor.width);
- }
-
- var resultImage = {
- pixels: pixels,
- dims: {
- top: frame.image.descriptor.top,
- left: frame.image.descriptor.left,
- width: frame.image.descriptor.width,
- height: frame.image.descriptor.height
- }
- }; // color table
-
- if (image.descriptor.lct && image.descriptor.lct.exists) {
- resultImage['colorTable'] = image.lct;
- } else {
- resultImage['colorTable'] = gct;
- } // add per frame relevant gce information
-
-
- if (frame.gce) {
- resultImage['delay'] = (frame.gce.delay || 10) * 10; // convert to ms
-
- resultImage['disposalType'] = frame.gce.extras.disposal; // transparency
-
- if (frame.gce.extras.transparentColorGiven) {
- resultImage['transparentIndex'] = frame.gce.transparentColorIndex;
- }
- } // create canvas usable imagedata if desired
-
-
- if (buildImagePatch) {
- resultImage['patch'] = generatePatch(resultImage);
- resultImage['colorTable'] = null
- resultImage['transparentIndex'] = null
- resultImage['pixels'] = null
- }
-
- return resultImage;
-};
-
-
-export function decompressFrames(parsedGif, buildImagePatches) {
- return parsedGif.frames.filter(function (f) {
- return f.image;
- }).map(function (f) {
- return decompressFrame(f, parsedGif.gct, buildImagePatches);
- });
-};
-
diff --git a/imageknife/src/main/ets/components/imageknife/utils/gif/parse/lzw.js b/imageknife/src/main/ets/components/imageknife/utils/gif/parse/lzw.js
deleted file mode 100644
index 113a8d4..0000000
--- a/imageknife/src/main/ets/components/imageknife/utils/gif/parse/lzw.js
+++ /dev/null
@@ -1,139 +0,0 @@
-/*
- * Copyright (C) 2022 Huawei Device Co., Ltd.
- * Licensed under the Apache License, Version 2.0 (the 'License');
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an 'AS IS' BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * javascript port of java LZW decompression
- * Original java author url: https://gist.github.com/devunwired/4479231
- */
-export function lzw(minCodeSize, data, pixelCount) {
- var MAX_STACK_SIZE = 4096;
- var nullCode = -1;
- var npix = pixelCount;
- var available;
- var clear;
- var codeMask;
- var codeSize;
- var endOfInformation;
- var inCode;
- var oldCode;
- var bits;
- var code;
- var i;
- var datum;
- var dataSize;
- var first;
- var top;
- var bi;
- var pi;
- var dstPixels = new Array(pixelCount);
- var prefix = new Array(MAX_STACK_SIZE);
- var suffix = new Array(MAX_STACK_SIZE);
- var pixelStack = new Array(MAX_STACK_SIZE + 1); // Initialize GIF data stream decoder.
-
- dataSize = minCodeSize;
- clear = 1 << dataSize;
- endOfInformation = clear + 1;
- available = clear + 2;
- oldCode = nullCode;
- codeSize = dataSize + 1;
- codeMask = (1 << codeSize) - 1;
-
- for (code = 0; code < clear; code++) {
- prefix[code] = 0;
- suffix[code] = code;
- } // Decode GIF pixel stream.
-
-
- var datum, bits, count, first, top, pi, bi;
- datum = bits = count = first = top = pi = bi = 0;
-
- for (i = 0; i < npix;) {
- if (top === 0) {
- if (bits < codeSize) {
- // get the next byte
- datum += data[bi] << bits;
- bits += 8;
- bi++;
- continue;
- } // Get the next code.
-
-
- code = datum & codeMask;
- datum >>= codeSize;
- bits -= codeSize; // Interpret the code
-
- if (code > available || code == endOfInformation) {
- break;
- }
-
- if (code == clear) {
- // Reset decoder.
- codeSize = dataSize + 1;
- codeMask = (1 << codeSize) - 1;
- available = clear + 2;
- oldCode = nullCode;
- continue;
- }
-
- if (oldCode == nullCode) {
- pixelStack[top++] = suffix[code];
- oldCode = code;
- first = code;
- continue;
- }
-
- inCode = code;
-
- if (code == available) {
- pixelStack[top++] = first;
- code = oldCode;
- }
-
- while (code > clear) {
- pixelStack[top++] = suffix[code];
- code = prefix[code];
- }
-
- first = suffix[code] & 0xff;
- pixelStack[top++] = first; // add a new string to the table, but only if space is available
- // if not, just continue with current table until a clear code is found
- // (deferred clear code implementation as per GIF spec)
-
- if (available < MAX_STACK_SIZE) {
- prefix[available] = oldCode;
- suffix[available] = first;
- available++;
-
- if ((available & codeMask) === 0 && available < MAX_STACK_SIZE) {
- codeSize++;
- codeMask += available;
- }
- }
-
- oldCode = inCode;
- } // Pop a pixel off the pixel stack.
-
-
- top--;
- dstPixels[pi++] = pixelStack[top];
- i++;
- }
-
- for (i = pi; i < npix; i++) {
- dstPixels[i] = 0; // clear missing pixels
- }
-
- return dstPixels;
-};
diff --git a/imageknife/src/main/ets/components/imageknife/utils/gif/utils/ParseHelperUtils.ts b/imageknife/src/main/ets/components/imageknife/utils/gif/utils/ParseHelperUtils.ts
deleted file mode 100644
index 7db1c2e..0000000
--- a/imageknife/src/main/ets/components/imageknife/utils/gif/utils/ParseHelperUtils.ts
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Copyright (C) 2022 Huawei Device Co., Ltd.
- * Licensed under the Apache License, Version 2.0 (the 'License');
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an 'AS IS' BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-export function fixLoseGCE(gif) {
- let currentGce = null;
- for (const frame of gif.frames) {
- currentGce = frame.gce ? frame.gce : currentGce;
- // fix loosing graphic control extension for same frames
- if ("image" in frame && !("gce" in frame)) {
- frame.gce = currentGce;
- }
- }
-}
\ No newline at end of file
diff --git a/library/.gitignore b/library/.gitignore
new file mode 100644
index 0000000..e2713a2
--- /dev/null
+++ b/library/.gitignore
@@ -0,0 +1,6 @@
+/node_modules
+/oh_modules
+/.preview
+/build
+/.cxx
+/.test
\ No newline at end of file
diff --git a/library/build-profile.json5 b/library/build-profile.json5
new file mode 100644
index 0000000..befa101
--- /dev/null
+++ b/library/build-profile.json5
@@ -0,0 +1,10 @@
+{
+ "apiType": 'stageMode',
+ "buildOption": {
+ },
+ "targets": [
+ {
+ "name": "default"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/library/hvigorfile.ts b/library/hvigorfile.ts
new file mode 100644
index 0000000..0e65ea8
--- /dev/null
+++ b/library/hvigorfile.ts
@@ -0,0 +1,2 @@
+// Script for compiling build behavior. It is built in the build plug-in and cannot be modified currently.
+module.exports = require('@ohos/hvigor-ohos-plugin').hspTasks
diff --git a/library/oh-package.json5 b/library/oh-package.json5
new file mode 100644
index 0000000..beeccdc
--- /dev/null
+++ b/library/oh-package.json5
@@ -0,0 +1,11 @@
+{
+ "name": "library",
+ "version": "1.0.0",
+ "description": "Please describe the basic information.",
+ "main": "./src/main/ets/Index.ets",
+ "author": "",
+ "license": "Apache-2.0",
+ "dependencies": {
+ "@ohos/imageknife": "file:../imageknife"
+ }
+}
\ No newline at end of file
diff --git a/library/src/main/ets/Index.ets b/library/src/main/ets/Index.ets
new file mode 100644
index 0000000..4daa5e4
--- /dev/null
+++ b/library/src/main/ets/Index.ets
@@ -0,0 +1,133 @@
+/*
+ * Copyright (C) 2023 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+// The interfaces that ImageKnife exposes are re-exported through this library
+
+/**
+ * cache
+ */
+
+export { FileUtils } from '@ohos/imageknife'
+export { Base64 } from '@ohos/imageknife'
+export { LruCache } from '@ohos/imageknife'
+export { DiskStrategy } from '@ohos/imageknife'
+export { ALL } from '@ohos/imageknife'
+export { AUTOMATIC } from '@ohos/imageknife'
+export { DATA } from '@ohos/imageknife'
+export { NONE } from '@ohos/imageknife'
+export { RESOURCE } from '@ohos/imageknife'
+export { EngineKeyInterface } from '@ohos/imageknife'
+export { EngineKeyFactories } from '@ohos/imageknife'
+
+/**
+ * compress
+ */
+export { CompressBuilder } from '@ohos/imageknife'
+export { OnCompressListener } from '@ohos/imageknife'
+export { OnRenameListener } from '@ohos/imageknife'
+export { CompressDataListener } from '@ohos/imageknife'
+export { CompressionPredicate } from '@ohos/imageknife'
+export { CompressAdapter } from '@ohos/imageknife'
+export { CompressProvider } from '@ohos/imageknife'
+export { DataStringPathProvider } from '@ohos/imageknife'
+export { RecourseProvider } from '@ohos/imageknife'
+
+/**
+ * crop
+ */
+
+export { CropImage } from '@ohos/imageknife'
+export { CropOptions } from '@ohos/imageknife'
+export { PixelMapCrop,Options } from '@ohos/imageknife'
+export { CropCallback } from '@ohos/imageknife'
+
+/**
+ * transform
+ */
+export { BaseTransform } from '@ohos/imageknife'
+export { BlurTransformation } from '@ohos/imageknife'
+export { BrightnessFilterTransformation } from '@ohos/imageknife'
+export { ContrastFilterTransformation } from '@ohos/imageknife'
+export { CropCircleTransformation } from '@ohos/imageknife'
+export { CropCircleWithBorderTransformation,rgbColor } from '@ohos/imageknife'
+export { CropSquareTransformation } from '@ohos/imageknife'
+export { CropTransformation,CropType } from '@ohos/imageknife'
+export { GrayscaleTransformation } from '@ohos/imageknife'
+export { InvertFilterTransformation } from '@ohos/imageknife'
+export { PixelationFilterTransformation } from '@ohos/imageknife'
+export { RotateImageTransformation } from '@ohos/imageknife'
+export { RoundedCornersTransformation,RoundCorner } from '@ohos/imageknife'
+export { SepiaFilterTransformation } from '@ohos/imageknife'
+export { SketchFilterTransformation } from '@ohos/imageknife'
+export { MaskTransformation } from '@ohos/imageknife'
+export { SwirlFilterTransformation } from '@ohos/imageknife'
+export { KuwaharaFilterTransform } from '@ohos/imageknife'
+export { ToonFilterTransform } from '@ohos/imageknife'
+export { VignetteFilterTransform } from '@ohos/imageknife'
+export { TransformUtils } from '@ohos/imageknife'
+export { TransformType } from '@ohos/imageknife'
+export { CenterCrop } from '@ohos/imageknife'
+export { CenterInside } from '@ohos/imageknife'
+export { FitCenter } from '@ohos/imageknife'
+
+/**
+ * pngj
+ */
+export { Pngj } from '@ohos/imageknife'
+export {handler} from '@ohos/imageknife'
+export { UPNG } from '@ohos/imageknife'
+
+
+
+/**
+ * ImageKnife
+ */
+export { ImageKnife } from '@ohos/imageknife'
+export { ImageKnifeGlobal } from '@ohos/imageknife'
+export {RequestOption,Size} from '@ohos/imageknife'
+export {ObjectKey} from '@ohos/imageknife'
+export { ImageKnifeComponent, ScaleType, ScaleTypeHelper } from '@ohos/imageknife'
+export { ImageKnifeDrawFactory } from '@ohos/imageknife'
+export {ImageKnifeOption,CropCircleWithBorder,Crop,GifOptions,TransformOptions} from '@ohos/imageknife'
+export { ImageKnifeData } from '@ohos/imageknife'
+export {IAllCacheInfoCallback,AllCacheInfo,ResourceCacheInfo,MemoryCacheInfo,DataCacheInfo} from '@ohos/imageknife'
+export {IParseImage} from '@ohos/imageknife'
+export {IDataFetch} from '@ohos/imageknife'
+export {ICache} from '@ohos/imageknife'
+export { FileTypeUtil } from '@ohos/imageknife'
+export { ParseImageUtil } from '@ohos/imageknife'
+
+/**
+ * svg parse
+ */
+export { SVGParseImpl } from '@ohos/imageknife'
+
+/**
+ * gif parse
+ */
+export { GIFParseImpl } from '@ohos/imageknife'
+export { GIFFrame } from '@ohos/imageknife'
+
+
+// Custom component additions
+// Custom component draw lifecycle
+export { IDrawLifeCycle } from '@ohos/imageknife'
+
+// Log management
+export { LogUtil } from '@ohos/imageknife'
+
+// Additionally exposes a method for initializing ImageKnife
+export {InitImageKnife} from '../ets/pages/InitImageKnife'
\ No newline at end of file
diff --git a/library/src/main/ets/pages/Index.ets b/library/src/main/ets/pages/Index.ets
new file mode 100644
index 0000000..86f8d60
--- /dev/null
+++ b/library/src/main/ets/pages/Index.ets
@@ -0,0 +1,79 @@
+/*
+ * Copyright (C) 2023 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import {ImageKnifeGlobal, ImageKnifeComponent, ImageKnifeOption, FileUtils } from '@ohos/imageknife'
+import common from '@ohos.app.ability.common'
+
+@Entry
+@Component
+struct Index {
+
+
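+  // Each ImageKnifeOption in this HSP demo carries the library module's context so that $r('app.media.*') resolves against the library's own resources.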
+ @State imageOption1:ImageKnifeOption = {
+ loadSrc: $r('app.media.icon'),
+ context: getContext(this).createModuleContext('library') as common.UIAbilityContext
+ }
+ @State imageOption2:ImageKnifeOption = {
+ loadSrc: $r('app.media.icon'),
+ context: getContext(this).createModuleContext('library') as common.UIAbilityContext
+ }
+ @State imageOption3:ImageKnifeOption = {
+ loadSrc: $r('app.media.icon'),
+ context: getContext(this).createModuleContext('library') as common.UIAbilityContext
+ }
+
+ build() {
+ Scroll() {
+ Column() {
+ Button('Load Resource').onClick(()=>{
+ this.imageOption1 = {
+ loadSrc: $r('app.media.setting'),
+ // Any Resource load inside an HSP must carry the context property
+ context: getContext(this).createModuleContext('library') as common.UIAbilityContext
+ }
+ })
+ ImageKnifeComponent({imageKnifeOption:this.imageOption1}).width(300).height(300).backgroundColor(Color.Pink)
+ Button('Load network image').onClick(()=>{
+ this.imageOption2 = {
+ loadSrc: 'https://hbimg.huabanimg.com/cc6af25f8d782d3cf3122bef4e61571378271145735e9-vEVggB',
+ context: getContext(this).createModuleContext('library') as common.UIAbilityContext
+ }
+ })
+ ImageKnifeComponent({imageKnifeOption:this.imageOption2}).width(300).height(300).backgroundColor(Color.Pink)
+
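+ // Local-file demo: copy a media resource from the library module into the app sandbox, then load it by absolute path.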
+ Button('Load local file').onClick(()=>{
+ getContext(this).createModuleContext('library').resourceManager.getMediaContent($r('app.media.setting').id).then((data:Uint8Array)=>{
+ let ctx = ImageKnifeGlobal.getInstance().getHapContext() as common.UIAbilityContext;
+ let path = ctx.filesDir+"/set.jpeg";
+ FileUtils.getInstance().writeFile(path,data.buffer)
+ FileUtils.getInstance().readFilePicAsync(path).then(buffer=>{
+ this.imageOption3 = {
+ loadSrc: path,
+ context: getContext(this).createModuleContext('library') as common.UIAbilityContext
+ }
+ })
+ })
+
+ })
+ ImageKnifeComponent({imageKnifeOption:this.imageOption3}).width(300).height(300).backgroundColor(Color.Pink)
+
+
+
+ }.width('100%')
+
+ }
+ .width('100%')
+ .height('100%')
+ }
+}
\ No newline at end of file
diff --git a/library/src/main/ets/pages/InitImageKnife.ets b/library/src/main/ets/pages/InitImageKnife.ets
new file mode 100644
index 0000000..05fc782
--- /dev/null
+++ b/library/src/main/ets/pages/InitImageKnife.ets
@@ -0,0 +1,22 @@
+/*
+ * Copyright (C) 2023 Huawei Device Co., Ltd.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import common from '@ohos.app.ability.common'
+import { ImageKnife } from '@ohos/imageknife'
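+// Helper exported from the HSP so the host entry ability can initialize the global ImageKnife instance with its own context.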
+export class InitImageKnife {
+  static init(entryContext: common.UIAbilityContext) {
+    ImageKnife.with(entryContext);
+  }
+}
\ No newline at end of file
diff --git a/library/src/main/module.json5 b/library/src/main/module.json5
new file mode 100644
index 0000000..74570f9
--- /dev/null
+++ b/library/src/main/module.json5
@@ -0,0 +1,13 @@
+{
+ "module": {
+ "name": "library",
+ "type": "shared",
+ "description": "$string:shared_desc",
+ "deviceTypes": [
+ "default",
+ "tablet"
+ ],
+ "deliveryWithInstall": true,
+ "pages": "$profile:main_pages"
+ }
+}
\ No newline at end of file
diff --git a/library/src/main/resources/base/element/color.json b/library/src/main/resources/base/element/color.json
new file mode 100644
index 0000000..1bbc9aa
--- /dev/null
+++ b/library/src/main/resources/base/element/color.json
@@ -0,0 +1,8 @@
+{
+ "color": [
+ {
+ "name": "white",
+ "value": "#FFFFFF"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/library/src/main/resources/base/element/string.json b/library/src/main/resources/base/element/string.json
new file mode 100644
index 0000000..98e1d8a
--- /dev/null
+++ b/library/src/main/resources/base/element/string.json
@@ -0,0 +1,8 @@
+{
+ "string": [
+ {
+ "name": "shared_desc",
+ "value": "description"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/library/src/main/resources/base/media/icon.png b/library/src/main/resources/base/media/icon.png
new file mode 100644
index 0000000..ce307a8
Binary files /dev/null and b/library/src/main/resources/base/media/icon.png differ
diff --git a/library/src/main/resources/base/media/setting.jpeg b/library/src/main/resources/base/media/setting.jpeg
new file mode 100644
index 0000000..57e67dd
Binary files /dev/null and b/library/src/main/resources/base/media/setting.jpeg differ
diff --git a/library/src/main/resources/base/profile/main_pages.json b/library/src/main/resources/base/profile/main_pages.json
new file mode 100644
index 0000000..1898d94
--- /dev/null
+++ b/library/src/main/resources/base/profile/main_pages.json
@@ -0,0 +1,5 @@
+{
+ "src": [
+ "pages/Index"
+ ]
+}
diff --git a/oh-package.json5 b/oh-package.json5
index 0194b78..5360e8c 100644
--- a/oh-package.json5
+++ b/oh-package.json5
@@ -6,6 +6,6 @@
"name": "imageknife",
"description": "example description",
"repository": {},
- "version": "2.1.1-rc.3",
+ "version": "2.1.1-rc.4",
"dependencies": {}
}