1. Bring in js-binary-schema-parser as a source-code dependency
Signed-off-by: zhoulisheng1 <zhoulisheng1@huawei.com>
parent 2dd27ab4e9
commit 887d740b48
@@ -12,7 +12,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-import { IParseGif } from './IParseGif'
+import type { IParseGif } from './IParseGif'
 import { GIFFrame } from './GIFFrame'
 import { LoadType } from '../../../../../../../GifWorker'
 import { parseBufferToFrame } from './parse/GIFParse'

@@ -0,0 +1,71 @@
"use strict";

var parse = function parse(stream, schema) {
  var result = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
  var parent = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : result;

  if (Array.isArray(schema)) {
    schema.forEach(function (partSchema) {
      return parse(stream, partSchema, result, parent);
    });
  } else if (typeof schema === 'function') {
    schema(stream, result, parent, parse);
  } else {
    var key = Object.keys(schema)[0];

    if (Array.isArray(schema[key])) {
      parent[key] = {};
      parse(stream, schema[key], result, parent[key]);
    } else {
      parent[key] = schema[key](stream, result, parent, parse);
    }
  }

  return result;
};

var conditional = function conditional(schema, conditionFunc) {
  return function (stream, result, parent, parse) {
    if (conditionFunc(stream, result, parent)) {
      parse(stream, schema, result, parent);
    }
  };
};

var loop = function loop(schema, continueFunc) {
  return function (stream, result, parent, parse) {
    var arr = [];
    var lastStreamPos = stream.pos;

    while (continueFunc(stream, result, parent)) {
      var newParent = {};
      parse(stream, schema, result, newParent);
      // when the whole file has been parsed but no terminator was found, the stream position
      // stops advancing; break here instead of looping forever
      if (stream.pos === lastStreamPos) {
        break;
      }

      lastStreamPos = stream.pos;
      arr.push(newParent);
    }

    return arr;
  };
};

export {
  loop,
  conditional,
  parse
}

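As an editor's aside (not part of the commit): a minimal sketch of how parse, conditional and loop compose. The stream object and byteParser below are made up for illustration; they only mimic the { data, pos } shape that the uint8 parsers in the next file produce.

// Illustration only: a fake 4-byte stream and a trivial byte parser.
var stream = { data: [0x01, 0x02, 0x03, 0x00], pos: 0 };
var byteParser = function (s) { return s.data[s.pos++]; };

var demoSchema = [
  { count: byteParser },
  // keep reading single bytes until a 0x00 terminator is next
  { rest: loop({ value: byteParser }, function (s) { return s.data[s.pos] !== 0x00; }) },
  // only read an extra flag byte when count is large enough (skipped for this input)
  conditional({ flag: byteParser }, function (s, result) { return result.count > 10; })
];

console.log(parse(stream, demoSchema));
// -> { count: 1, rest: [ { value: 2 }, { value: 3 } ] }
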
@@ -0,0 +1,114 @@
"use strict";

// Default stream and parsers for Uint8TypedArray data type
var buildStream = function buildStream(uint8Data) {
  return {
    data: uint8Data,
    pos: 0
  };
};

var readByte = function readByte() {
  return function (stream) {
    return stream.data[stream.pos++];
  };
};

var peekByte = function peekByte() {
  var offset = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 0;
  return function (stream) {
    return stream.data[stream.pos + offset];
  };
};

var readBytes = function readBytes(length) {
  return function (stream) {
    return stream.data.subarray(stream.pos, stream.pos += length);
  };
};

var peekBytes = function peekBytes(length) {
  return function (stream) {
    return stream.data.subarray(stream.pos, stream.pos + length);
  };
};

var readString = function readString(length) {
  return function (stream) {
    return Array.from(readBytes(length)(stream)).map(function (value) {
      return String.fromCharCode(value);
    }).join('');
  };
};

var readUnsigned = function readUnsigned(littleEndian) {
  return function (stream) {
    var bytes = readBytes(2)(stream);
    return littleEndian ? (bytes[1] << 8) + bytes[0] : (bytes[0] << 8) + bytes[1];
  };
};

var readArray = function readArray(byteSize, totalOrFunc) {
  return function (stream, result, parent) {
    var total = typeof totalOrFunc === 'function' ? totalOrFunc(stream, result, parent) : totalOrFunc;
    var parser = readBytes(byteSize);
    var arr = new Array(total);

    for (var i = 0; i < total; i++) {
      arr[i] = parser(stream);
    }

    return arr;
  };
};

var subBitsTotal = function subBitsTotal(bits, startIndex, length) {
  var result = 0;

  for (var i = 0; i < length; i++) {
    result += bits[startIndex + i] && Math.pow(2, length - i - 1);
  }

  return result;
};

var readBits = function readBits(schema) {
  return function (stream) {
    var _byte = readByte()(stream); // convert the byte to bit array

    var bits = new Array(8);

    for (var i = 0; i < 8; i++) {
      bits[7 - i] = !!(_byte & 1 << i);
    } // convert the bit array to values based on the schema

    return Object.keys(schema).reduce(function (res, key) {
      var def = schema[key];

      if (def.length) {
        res[key] = subBitsTotal(bits, def.index, def.length);
      } else {
        res[key] = bits[def.index];
      }

      return res;
    }, {});
  };
};

export {buildStream,readByte,peekByte,readBytes,peekBytes,readString,readUnsigned,readArray,readBits}

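Again as an editor's aside (not part of the commit): a small sketch of the helpers above applied to a hand-written buffer; the byte values are arbitrary.

// Illustration only.
var s = buildStream(new Uint8Array([0x47, 0x49, 0x46, 0x0a, 0x00, 0x97]));

console.log(readString(3)(s));      // "GIF"  (bytes 0-2)
console.log(readUnsigned(true)(s)); // 10     (0x000a read little-endian, bytes 3-4)
console.log(readBits({
  exists: { index: 0 },             // single bit -> boolean
  size: { index: 5, length: 3 }     // bit run -> integer
})(s));                             // { exists: true, size: 7 } for 0x97 = 1001 0111
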
@@ -0,0 +1,220 @@
"use strict";

import {
  loop,
  conditional,
  parse
} from "../";

import {buildStream,readByte,peekByte,readBytes,peekBytes,readString,readUnsigned,readArray,readBits} from "../parsers/uint8";

// a set of 0x00 terminated subblocks
var subBlocksSchema = {
  blocks: function blocks(stream) {
    var terminator = 0x00;
    var chunks = [];
    var streamSize = stream.data.length;
    var total = 0;

    for (var size = (0, readByte)()(stream); size !== terminator; size = (0, readByte)()(stream)) {
      // size becomes undefined when the file is corrupted and the terminator is missing;
      // check for a falsy size to avoid looping forever
      if (!size) break; // catch corrupted files with no terminator

      if (stream.pos + size >= streamSize) {
        var availableSize = streamSize - stream.pos;
        chunks.push((0, readBytes)(availableSize)(stream));
        total += availableSize;
        break;
      }

      chunks.push((0, readBytes)(size)(stream));
      total += size;
    }

    var result = new Uint8Array(total);
    var offset = 0;

    for (var i = 0; i < chunks.length; i++) {
      result.set(chunks[i], offset);
      offset += chunks[i].length;
    }

    return result;
  }
}; // global control extension

var gceSchema = (0, conditional)({
  gce: [{
    codes: (0, readBytes)(2)
  }, {
    byteSize: (0, readByte)()
  }, {
    extras: (0, readBits)({
      future: {
        index: 0,
        length: 3
      },
      disposal: {
        index: 3,
        length: 3
      },
      userInput: {
        index: 6
      },
      transparentColorGiven: {
        index: 7
      }
    })
  }, {
    delay: (0, readUnsigned)(true)
  }, {
    transparentColorIndex: (0, readByte)()
  }, {
    terminator: (0, readByte)()
  }]
}, function (stream) {
  var codes = (0, peekBytes)(2)(stream);
  return codes[0] === 0x21 && codes[1] === 0xf9;
}); // image pipeline block

var imageSchema = (0, conditional)({
  image: [{
    code: (0, readByte)()
  }, {
    descriptor: [{
      left: (0, readUnsigned)(true)
    }, {
      top: (0, readUnsigned)(true)
    }, {
      width: (0, readUnsigned)(true)
    }, {
      height: (0, readUnsigned)(true)
    }, {
      lct: (0, readBits)({
        exists: {
          index: 0
        },
        interlaced: {
          index: 1
        },
        sort: {
          index: 2
        },
        future: {
          index: 3,
          length: 2
        },
        size: {
          index: 5,
          length: 3
        }
      })
    }]
  }, (0, conditional)({
    lct: (0, readArray)(3, function (stream, result, parent) {
      return Math.pow(2, parent.descriptor.lct.size + 1);
    })
  }, function (stream, result, parent) {
    return parent.descriptor.lct.exists;
  }), {
    data: [{
      minCodeSize: (0, readByte)()
    }, subBlocksSchema]
  }]
}, function (stream) {
  return (0, peekByte)()(stream) === 0x2c;
}); // plain text block

var textSchema = (0, conditional)({
  text: [{
    codes: (0, readBytes)(2)
  }, {
    blockSize: (0, readByte)()
  }, {
    preData: function preData(stream, result, parent) {
      return (0, readBytes)(parent.text.blockSize)(stream);
    }
  }, subBlocksSchema]
}, function (stream) {
  var codes = (0, peekBytes)(2)(stream);
  return codes[0] === 0x21 && codes[1] === 0x01;
}); // application block

var applicationSchema = (0, conditional)({
  application: [{
    codes: (0, readBytes)(2)
  }, {
    blockSize: (0, readByte)()
  }, {
    id: function id(stream, result, parent) {
      return (0, readString)(parent.blockSize)(stream);
    }
  }, subBlocksSchema]
}, function (stream) {
  var codes = (0, peekBytes)(2)(stream);
  return codes[0] === 0x21 && codes[1] === 0xff;
}); // comment block

var commentSchema = (0, conditional)({
  comment: [{
    codes: (0, readBytes)(2)
  }, subBlocksSchema]
}, function (stream) {
  var codes = (0, peekBytes)(2)(stream);
  return codes[0] === 0x21 && codes[1] === 0xfe;
});

var schema = [{
  header: [{
    signature: (0, readString)(3)
  }, {
    version: (0, readString)(3)
  }]
}, {
  lsd: [{
    width: (0, readUnsigned)(true)
  }, {
    height: (0, readUnsigned)(true)
  }, {
    gct: (0, readBits)({
      exists: {
        index: 0
      },
      resolution: {
        index: 1,
        length: 3
      },
      sort: {
        index: 4
      },
      size: {
        index: 5,
        length: 3
      }
    })
  }, {
    backgroundColorIndex: (0, readByte)()
  }, {
    pixelAspectRatio: (0, readByte)()
  }]
}, (0, conditional)({
  gct: (0, readArray)(3, function (stream, result) {
    return Math.pow(2, result.lsd.gct.size + 1);
  })
}, function (stream, result) {
  return result.lsd.gct.exists;
}), // content frames
{
  frames: (0, loop)([gceSchema, applicationSchema, commentSchema, imageSchema, textSchema], function (stream) {
    var nextCode = (0, peekByte)()(stream);
    // rather than check for a terminator, we should check for the existence
    // of an ext or image block to avoid infinite loops
    //var terminator = 0x3B;
    //return nextCode !== terminator;

    return nextCode === 0x21 || nextCode === 0x2c;
  })
}];

var _default = schema;

export {_default}

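One more editor's sketch (not part of the commit) of driving this schema end to end, mirroring what parseGIF does in the last hunk below. The import paths are only indicative and depend on where the snippet lives; gifSchema is just a local alias for _default, and the byte array is a minimal hand-written 1x1 GIF.

// Illustration only.
import { parse } from '../index';
import { buildStream } from '../parsers/uint8';
import { _default as gifSchema } from '../schemas/gif';

var gifBytes = new Uint8Array([
  0x47, 0x49, 0x46, 0x38, 0x39, 0x61,                         // "GIF89a"
  0x01, 0x00, 0x01, 0x00, 0x80, 0x00, 0x00,                   // logical screen descriptor: 1x1, 2-color GCT
  0x00, 0x00, 0x00, 0xff, 0xff, 0xff,                         // global color table
  0x21, 0xf9, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00,             // graphic control extension
  0x2c, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x01, 0x00, 0x00, // image descriptor
  0x02, 0x02, 0x44, 0x01, 0x00,                               // image data: min code size + one sub-block
  0x3b                                                        // trailer
]);

var parsed = parse(buildStream(gifBytes), gifSchema);
console.log(parsed.header.signature);             // "GIF"
console.log(parsed.lsd.width, parsed.lsd.height); // 1 1
console.log(parsed.frames.length);                // 1 (a gce + image pair)
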
@@ -13,24 +13,34 @@
 * limitations under the License.
 */

var _gif = _interopRequireDefault(require("js-binary-schema-parser/lib/schemas/gif"));

var _jsBinarySchemaParser = require("js-binary-schema-parser");

var _uint = require("js-binary-schema-parser/lib/parsers/uint8");

import { _default } from '../jsbinaryschemaparser/lib/schemas/gif'
import { conditional, loop, parse } from '../jsbinaryschemaparser/lib/index'
import {
  buildStream,
  peekByte,
  peekBytes,
  readArray,
  readBits,
  readByte,
  readBytes,
  readString,
  readUnsigned
} from '../jsbinaryschemaparser/lib/parsers/uint8'
import { deinterlace } from './deinterlace'
import { lzw } from './lzw'

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
var _gif = _interopRequireDefault(_default);

function _interopRequireDefault(obj) {
  return { "default": obj };
}

export function parseGIF(arrayBuffer) {
  var byteData = new Uint8Array(arrayBuffer);
  return (0, _jsBinarySchemaParser.parse)((0, _uint.buildStream)(byteData), _gif["default"]);
  return (parse)((buildStream)(byteData), _gif["default"]);
};

export function generatePatch(image) {
  var totalPixels = image.pixels.length;
  var patchData = new Uint8ClampedArray(totalPixels * 4);

@@ -103,7 +113,6 @@ export function decompressFrame(frame, gct, buildImagePatch) {
};

export function decompressFrames(parsedGif, buildImagePatches) {
  return parsedGif.frames.filter(function (f) {
    return f.image;