forked from floraachy/ImageKnife
commit 99e251c6bd

@@ -2,6 +2,9 @@
 - Added a GIF play-count (loop count) feature
 - Added prefetchToDiskCache, a disk-prefetch API that returns the cached file path
 - Added isUrlExist, an API that checks whether an image is already in the memory or disk cache without going to the network
+- Removed redundant disk journal read/write operations
+- Clear the GIF timer only when the current image is a GIF
+- Generate UUIDs with util.generateRandomUUID()

 ## 2.1.2-rc.11
 - Fixed a crash when setting the maximum disk cache size

@@ -12,13 +12,10 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-import fileio from '@ohos.fileio'
 import { CustomMap } from './CustomMap'
 import { FileUtils } from './FileUtils'
-import { FileReader } from './FileReader'
 import { DiskCacheEntry } from './DiskCacheEntry'
 import { SparkMD5 } from '../3rd_party/sparkmd5/spark-md5'
-import { Context } from '@ohos.abilityAccessCtrl'
 import common from '@ohos.app.ability.common'

 export class DiskLruCache {

@@ -26,16 +23,6 @@ export class DiskLruCache {
   private static readonly DEFAULT_MAX_SIZE: number = 300 * 1024 * 1024
   // Default cache file name
   private static readonly DEFAULT_NAME: string = 'diskLruCache'
-  // Journal file name
-  private static readonly journal: string = 'journal'
-  // Journal backup file name
-  private static readonly journalTemp: string = 'journal_temp'
-  // Journal 'save' record tag
-  private static readonly SAVE: string = 'save'
-  // Journal 'read' record tag
-  private static readonly READ: string = 'read'
-  // Journal 'remove' record tag
-  private static readonly REMOVE: string = 'remove'
   // Cache directory path
   private path: string = ''
   // Journal file path

@@ -52,8 +39,6 @@
   constructor(path: string, maxSize: number) {
     this.path = path
     this.maxSize = maxSize
-    this.journalPath = path + DiskLruCache.journal
-    this.journalPathTemp = path + DiskLruCache.journalTemp
   }

   /**

@@ -83,26 +68,7 @@
     } else {
       path = path + FileUtils.SEPARATOR
     }
-    let journalPath = path + DiskLruCache.journal
-    let journalPathTemp = path + DiskLruCache.journalTemp
-
-    // Check whether the journal file exists; create it if it has not been initialized
-    if (FileUtils.getInstance().exist(journalPath)) {
-      let stat = fileio.statSync(journalPath)
-      if (stat.size > 0) {
-        FileUtils.getInstance().createFile(journalPathTemp)
-        FileUtils.getInstance().copyFile(journalPath, journalPathTemp)
-        let diskLruCache: DiskLruCache = new DiskLruCache(path, maxSize)
-        diskLruCache.readJournal(journalPathTemp)
-        diskLruCache.resetJournalFile()
-        return diskLruCache
-      } else {
-        return new DiskLruCache(path, maxSize)
-      }
-    } else {
-      FileUtils.getInstance().createFile(journalPath)
-      return new DiskLruCache(path, maxSize)
-    }
+    return new DiskLruCache(path, maxSize)
   }

   /**

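With the journal replay removed, the factory only normalizes the directory path and constructs the cache. A minimal sketch of how the whole method reads after this hunk, assuming a static `create(path, maxSize)` signature and an `endsWith` guard (neither the method name nor the opening condition is visible in the diff):

```typescript
// Sketch only: method name, default value, and the path check are assumptions.
static create(path: string, maxSize: number = DiskLruCache.DEFAULT_MAX_SIZE): DiskLruCache {
  // Normalize the directory path so that "path + key" later forms a valid file path.
  if (!path.endsWith(FileUtils.SEPARATOR)) {
    path = path + FileUtils.SEPARATOR
  }
  // No journal to replay any more: the in-memory LRU map starts empty and is
  // rebuilt lazily as entries are written and read.
  return new DiskLruCache(path, maxSize)
}
```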
@@ -147,7 +113,6 @@
     key = SparkMD5.hashBinary(key)
     this.size = this.size + fileSize
     this.putCacheMap(key, fileSize)
-    FileUtils.getInstance().writeData(this.journalPath, DiskLruCache.SAVE + ' ' + key + FileReader.LF)
     this.trimToSize()
     let tempPath = this.path + key
     FileUtils.getInstance().writeNewFile(tempPath, content)

@@ -182,7 +147,6 @@
     key = SparkMD5.hashBinary(key)
     this.size = this.size + fileSize
     this.putCacheMap(key, fileSize)
-    await FileUtils.getInstance().writeDataAsync(this.journalPath, DiskLruCache.SAVE + ' ' + key + FileReader.LF)
     this.trimToSize()
     let tempPath = this.path + key
     await FileUtils.getInstance().writeNewFileAsync(tempPath, content)

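After this change a write touches only the in-memory bookkeeping and the cache file itself. The sketch below condenses the lines shown above into one method; the enclosing signature and the origin of `fileSize` are assumptions, since both hunks start mid-method:

```typescript
// Sketch of the synchronous write path after this commit (no journal record).
set(key: string, content: ArrayBuffer): void {
  const fileSize = content.byteLength                       // assumed: size taken from the payload
  key = SparkMD5.hashBinary(key)                            // entries are stored under the MD5 of the key
  this.size = this.size + fileSize                          // grow the accounted cache size
  this.putCacheMap(key, fileSize)                           // refresh the LRU bookkeeping
  this.trimToSize()                                         // evict eldest entries if size > maxSize
  FileUtils.getInstance().writeNewFile(this.path + key, content) // persist the payload
}
```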
@@ -202,7 +166,6 @@
     if (FileUtils.getInstance().exist(path)) {
       let ab: ArrayBuffer = FileUtils.getInstance().readFile(path)
       this.putCacheMap(key, ab.byteLength)
-      FileUtils.getInstance().writeData(this.journalPath, DiskLruCache.READ + ' ' + key + FileReader.LF)
       return ab
     } else {
       return undefined;

@@ -223,7 +186,6 @@
     if (FileUtils.getInstance().exist(path)) {
       let ab: ArrayBuffer = await FileUtils.getInstance().readFileAsync(path)
       this.putCacheMap(key, ab.byteLength)
-      await FileUtils.getInstance().writeDataAsync(this.journalPath, DiskLruCache.READ + ' ' + key + FileReader.LF)
       return ab
     } else {
       return undefined;

@@ -242,7 +204,6 @@
     key = SparkMD5.hashBinary(key);
     let path = this.path + key;
     if (FileUtils.getInstance().exist(path)) {
-      FileUtils.getInstance().writeData(this.journalPath, DiskLruCache.READ + ' ' + key + FileReader.LF);
       return path
     } else {
       return "";

@@ -261,7 +222,6 @@
     key = SparkMD5.hashBinary(key);
     let path = this.path + key;
     if (FileUtils.getInstance().exist(path)) {
-      await FileUtils.getInstance().writeDataAsync(this.journalPath, DiskLruCache.READ + ' ' + key + FileReader.LF);
       return path
     } else {
       return ""

@@ -290,7 +250,6 @@
       let ab = FileUtils.getInstance().readFile(path)
       this.size = this.size - ab.byteLength
       this.cacheMap.remove(key)
-      FileUtils.getInstance().writeData(this.journalPath, DiskLruCache.REMOVE + ' ' + key + FileReader.LF)
       FileUtils.getInstance().deleteFile(path)
     }
     return this.cacheMap.get(key) as DiskCacheEntry;

@@ -328,65 +287,6 @@
     return this.size;
   }

-  /**
-   * Process one line of journal data
-   *
-   * @param line a line from the journal file
-   */
-  private dealWithJournal(line: string) {
-    let filePath = ''
-    try {
-      let lineData = line.split(' ')
-      if (lineData.length > 1) {
-        if (lineData[0] != DiskLruCache.REMOVE) {
-          filePath = this.path + lineData[1]
-          let fileStat = fileio.statSync(filePath)
-          if (fileStat.isFile() && fileStat.size > 0) {
-            this.size = this.size + fileStat.size
-            FileUtils.getInstance().writeData(this.journalPath, line + FileReader.LF)
-            this.putCacheMap(lineData[1], fileStat.size)
-          }
-        } else {
-          if (this.cacheMap.hasKey(lineData[1])) {
-            let cacheEntry: DiskCacheEntry = this.cacheMap.get(lineData[1]) as DiskCacheEntry;
-            this.size = this.size - cacheEntry.getLength()
-            this.cacheMap.remove(lineData[1])
-          }
-        }
-      }
-    } catch (e) {
-      console.error('DiskLruCache - dealWithJournal e ' + e)
-    }
-  }
-
-  /**
-   * Reset the journal file contents
-   */
-  private resetJournalFile() {
-    FileUtils.getInstance().clearFile(this.journalPath)
-    for (let key of this.cacheMap.keys()) {
-      FileUtils.getInstance().writeData(this.journalPath, DiskLruCache.SAVE + ' ' + key + FileReader.LF)
-    }
-  }
-
-  /**
-   * Read cached entries from the journal file
-   *
-   * @param path journal file path
-   */
-  private readJournal(path: string) {
-    let fileReader = new FileReader(path)
-    let line: string = ''
-    while (!fileReader.isEnd()) {
-      line = fileReader.readLine()
-      line = line.replace(FileReader.LF, '').replace(FileReader.CR, '')
-      this.dealWithJournal(line)
-    }
-    fileReader.close()
-    FileUtils.getInstance().deleteFile(this.journalPathTemp)
-    this.trimToSize()
-  }
-
   /**
    * Cache entry map
    *

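For reference, the deleted code implies the on-disk journal was a plain-text log with one space-separated record per line: a tag (`save`, `read`, or `remove`) followed by the MD5-hashed key. On startup, `readJournal` replayed those records to rebuild the in-memory LRU map, which is exactly the bookkeeping this commit drops. An illustrative fragment, parsed the same way the removed `dealWithJournal` did (the hash values here are made up):

```typescript
// Illustrative only: the journal line format implied by the removed code.
const sampleJournal =
  'save 9e107d9d372bb6826bd81d3542a419d6\n' +   // entry written to the cache
  'read 9e107d9d372bb6826bd81d3542a419d6\n' +   // entry touched (refreshes recency)
  'remove 9e107d9d372bb6826bd81d3542a419d6\n'   // entry evicted or deleted

for (const line of sampleJournal.split('\n').filter(l => l.length > 0)) {
  const [tag, hashedKey] = line.split(' ')      // same split(' ') as dealWithJournal used
  console.log(tag, hashedKey)
}
```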
@@ -413,7 +313,6 @@
       }
       FileUtils.getInstance().deleteFile(this.path + tempKey)
       this.cacheMap.remove(tempKey)
-      FileUtils.getInstance().writeData(this.journalPath, DiskLruCache.REMOVE + ' ' + tempKey + FileReader.LF)
     }
   }

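This hunk shows only the tail of the eviction step. A hedged sketch of what a `trimToSize` loop of this shape typically looks like, assuming `cacheMap` iterates from the least recently used key first and that each `DiskCacheEntry` exposes its byte length via `getLength()` (both assumptions; neither is shown in this hunk):

```typescript
// Sketch of an LRU trim loop consistent with the lines above (not the verbatim source).
private trimToSize(): void {
  // Assumption: keys() yields entries in least-recently-used-first order.
  for (let tempKey of this.cacheMap.keys()) {
    if (this.size <= this.maxSize) {
      break                                                  // cache is back under budget
    }
    const entry = this.cacheMap.get(tempKey) as DiskCacheEntry
    this.size = this.size - entry.getLength()                // shrink the accounted size
    FileUtils.getInstance().deleteFile(this.path + tempKey)  // drop the cached file
    this.cacheMap.remove(tempKey)                            // drop the bookkeeping entry
  }
}
```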
@@ -439,8 +338,6 @@
       throw new Error('key is null, checking the parameter')
     }
     key = SparkMD5.hashBinary(key);
-    FileUtils.getInstance()
-      .writeData(path + DiskLruCache.journal, DiskLruCache.SAVE + ' ' + key + FileReader.LF);
     FileUtils.getInstance().writeNewFile(path + key, value);
     return true
   }

@@ -460,7 +357,6 @@
     let filepath = path + key;
     if (FileUtils.getInstance().exist(filepath)) {
       let ab: ArrayBuffer = FileUtils.getInstance().readFile(filepath)
-      FileUtils.getInstance().writeData(path + DiskLruCache.journal, DiskLruCache.READ + ' ' + key + FileReader.LF)
       return ab
     } else {
       return undefined;

@@ -534,7 +534,6 @@ export class ImageKnife {
       this.pendingMaps.remove(nextPending.uuid)
       this.runningMaps.put(nextPending.uuid, nextPending);

-      // RequestManager.execute((nextPending as RequestOption), this.memoryCache, this.diskMemoryCache, this.dataFetch, this.resourceFetch)
       this.taskpoolLoadResource(nextPending, Constants.MAIN_HOLDER);

     }

@@ -565,7 +564,6 @@
       let nextPending = pendingTailNode.value;
       this.runningMaps.put(nextPending.uuid, nextPending)
       this.pendingMaps.remove(nextPending.uuid)
-      //RequestManager.execute((nextPending as RequestOption), this.memoryCache, this.diskMemoryCache, this.dataFetch, this.resourceFetch)
       this.taskpoolLoadResource(nextPending, Constants.MAIN_HOLDER);
     }
   }

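Both hunks delete the same stale comment around the queue hand-off: a request is promoted from the pending map to the running map and then dispatched through the task pool. A condensed sketch of that hand-off as it reads after the change; the wrapper method and the shape of `pendingTailNode` are assumptions, since only the body lines appear in the diff:

```typescript
// Sketch: promote the most recently queued pending request and dispatch it.
private dispatchNextPending(pendingTailNode: { value: RequestOption } | undefined): void {
  if (pendingTailNode === undefined) {
    return                                                       // nothing waiting in the pending queue
  }
  let nextPending = pendingTailNode.value
  this.runningMaps.put(nextPending.uuid, nextPending)             // mark the request as in flight
  this.pendingMaps.remove(nextPending.uuid)                       // drop it from the waiting queue
  this.taskpoolLoadResource(nextPending, Constants.MAIN_HOLDER)   // load it off the main thread
}
```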
@@ -733,13 +731,11 @@
     } else {
       if ((typeof (data as PixelMap).isEditable) == 'boolean') {
         let imageKnifeData = ImageKnifeData.createImagePixelMap(ImageKnifeType.PIXELMAP, data as PixelMap);
-        imageKnifeData.needSaveDisk = true;
         request.loadComplete(imageKnifeData)
         this.memoryCacheProxy.putValue(request.generateCacheKey,imageKnifeData)
         this.setDiskCache(request)
       } else if ((data as GIFFrame[]).length > 0) {
         let imageKnifeData = ImageKnifeData.createImageGIFFrame(ImageKnifeType.GIFFRAME, data as GIFFrame[]);
-        imageKnifeData.needSaveDisk = true;
         request.loadComplete(imageKnifeData)
         this.memoryCacheProxy.putValue(request.generateCacheKey,imageKnifeData)
         this.setDiskCache(request)

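The branch above discriminates the decoded result by duck typing: a PixelMap is recognized because it carries a boolean `isEditable` property, while a GIF decodes to a non-empty `GIFFrame[]`. With `needSaveDisk` removed here (and the field itself deleted in the ImageKnifeData hunk below), both branches now rely solely on `setDiskCache(request)`. A small illustrative helper mirroring that discrimination; the `DecodedResource` alias and the function itself are invented for illustration, not part of the library:

```typescript
// Illustrative helper only; 'DecodedResource' is a made-up alias for the union handled above.
type DecodedResource = PixelMap | GIFFrame[]

function wrapDecodedResource(data: DecodedResource): ImageKnifeData | undefined {
  if (typeof (data as PixelMap).isEditable === 'boolean') {
    // Only a real PixelMap exposes the boolean isEditable flag.
    return ImageKnifeData.createImagePixelMap(ImageKnifeType.PIXELMAP, data as PixelMap)
  } else if ((data as GIFFrame[]).length > 0) {
    // Otherwise a non-empty frame array means an animated GIF.
    return ImageKnifeData.createImageGIFFrame(ImageKnifeType.GIFFRAME, data as GIFFrame[])
  }
  return undefined
}
```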
@@ -782,10 +782,12 @@ export struct ImageKnifeComponent {
   }

   private resetGifData() {
-    clearTimeout(this.gifTimerId)
-    this.gifLoopDuration = 0;
-    this.startGifLoopTime = 0;
-    this.endGifLoopTime = 0;
+    if(this.isGif) {
+      clearTimeout(this.gifTimerId)
+      this.gifLoopDuration = 0;
+      this.startGifLoopTime = 0;
+      this.endGifLoopTime = 0;
+    }
   }

   /**

@@ -96,7 +96,6 @@ export class ImageKnifeData {
   static BMP = 'bmp';
   static WEBP = 'webp';
   waitSaveDisk = false;
-  needSaveDisk = false;
   imageKnifeType: ImageKnifeType | undefined = undefined;
   drawPixelMap: DrawPixelMap | undefined = undefined;
   drawGIFFrame: DrawGIFFrame | undefined = undefined;

@@ -58,6 +58,7 @@ import { DiskCacheProxy } from './requestmanage/DiskCacheProxy'
 import { DiskLruCache } from '../cache/DiskLruCache'
 import { SparkMD5 } from '../3rd_party/sparkmd5/spark-md5'
 import { FileUtils } from '../cache/FileUtils'
+import util from '@ohos.util'

 export interface Size {
   width: number,

@@ -182,12 +183,13 @@ export class RequestOption {
     this.transformations = array;
   }
   generateUUID(): string {
-    let d = new Date().getTime();
-    const uuid = 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(new RegExp("[xy]", "g"), (c) => {
-      const r = (d + Math.random() * 16) % 16 | 0;
-      d = Math.floor(d / 16);
-      return (c === 'x' ? r : (r & 0x3 | 0x8)).toString(16);
-    });
+    const uuid = util.generateRandomUUID()
+    // let d = new Date().getTime();
+    // const uuid = 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(new RegExp("[xy]", "g"), (c) => {
+    //   const r = (d + Math.random() * 16) % 16 | 0;
+    //   d = Math.floor(d / 16);
+    //   return (c === 'x' ? r : (r & 0x3 | 0x8)).toString(16);
+    // });
     return uuid;
   }

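The replacement delegates UUID generation to the platform instead of the hand-rolled Date/Math.random template. `util.generateRandomUUID()` comes from OpenHarmony's `@ohos.util` module and returns an RFC 4122 version-4 UUID string; a minimal standalone usage sketch outside ImageKnife:

```typescript
// Minimal usage sketch of the new UUID source.
import util from '@ohos.util'

function newRequestId(): string {
  // Returns a version-4 UUID string such as '84bdf796-66cc-4655-9b89-d6218d100f9c' (example value).
  return util.generateRandomUUID()
}
```

Compared with the template it replaces, the system call draws on the platform's random source rather than `Math.random()`, so identifiers for concurrent requests are far less likely to collide.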