排查性能优化点:uuid生成、定时器清理以及磁盘读写

Signed-off-by: zenggaofeng <zenggaofeng2@h-partners.com>
This commit is contained in:
zenggaofeng 2024-04-09 14:28:27 +08:00
parent 15b77e25c4
commit ccb69591df
6 changed files with 18 additions and 120 deletions

View File

@ -1,6 +1,9 @@
## 2.1.2-rc.12
- 新增磁盘预加载返回文件路径接口prefetchToDiskCache
- 新增跳过网络判断缓存或者磁盘中是否存在图片接口isUrlExist
- 删除多余的磁盘journal记录读写操作
- 清除定时器改为仅在加载Gif图时执行
- uuid生成改为使用util.generateRandomUUID()
## 2.1.2-rc.11
- 修复设置磁盘容量最大值出现闪退

View File

@ -12,13 +12,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import fileio from '@ohos.fileio'
import { CustomMap } from './CustomMap'
import { FileUtils } from './FileUtils'
import { FileReader } from './FileReader'
import { DiskCacheEntry } from './DiskCacheEntry'
import { SparkMD5 } from '../3rd_party/sparkmd5/spark-md5'
import { Context } from '@ohos.abilityAccessCtrl'
import common from '@ohos.app.ability.common'
export class DiskLruCache {
@ -26,16 +23,6 @@ export class DiskLruCache {
private static readonly DEFAULT_MAX_SIZE: number = 300 * 1024 * 1024
// 默认缓存文件名
private static readonly DEFAULT_NAME: string = 'diskLruCache'
// 缓存journal文件名称
private static readonly journal: string = 'journal'
// 缓存journal备份文件名称
private static readonly journalTemp: string = 'journal_temp'
// 备份文件save标识符
private static readonly SAVE: string = 'save'
// 备份文件read标识符
private static readonly READ: string = 'read'
// 备份文件remove标识符
private static readonly REMOVE: string = 'remove'
// 缓存文件路径地址
private path: string = ''
// 缓存journal文件路径
@ -52,8 +39,6 @@ export class DiskLruCache {
constructor(path: string, maxSize: number) {
this.path = path
this.maxSize = maxSize
this.journalPath = path + DiskLruCache.journal
this.journalPathTemp = path + DiskLruCache.journalTemp
}
/**
@ -83,26 +68,7 @@ export class DiskLruCache {
} else {
path = path + FileUtils.SEPARATOR
}
let journalPath = path + DiskLruCache.journal
let journalPathTemp = path + DiskLruCache.journalTemp
// 判断日志文件是否存在,如果没有初始化创建
if (FileUtils.getInstance().exist(journalPath)) {
let stat = fileio.statSync(journalPath)
if (stat.size > 0) {
FileUtils.getInstance().createFile(journalPathTemp)
FileUtils.getInstance().copyFile(journalPath, journalPathTemp)
let diskLruCache: DiskLruCache = new DiskLruCache(path, maxSize)
diskLruCache.readJournal(journalPathTemp)
diskLruCache.resetJournalFile()
return diskLruCache
} else {
return new DiskLruCache(path, maxSize)
}
} else {
FileUtils.getInstance().createFile(journalPath)
return new DiskLruCache(path, maxSize)
}
return new DiskLruCache(path, maxSize)
}
/**
@ -147,7 +113,6 @@ export class DiskLruCache {
key = SparkMD5.hashBinary(key)
this.size = this.size + fileSize
this.putCacheMap(key, fileSize)
FileUtils.getInstance().writeData(this.journalPath, DiskLruCache.SAVE + ' ' + key + FileReader.LF)
this.trimToSize()
let tempPath = this.path + key
FileUtils.getInstance().writeNewFile(tempPath, content)
@ -182,7 +147,6 @@ export class DiskLruCache {
key = SparkMD5.hashBinary(key)
this.size = this.size + fileSize
this.putCacheMap(key, fileSize)
await FileUtils.getInstance().writeDataAsync(this.journalPath, DiskLruCache.SAVE + ' ' + key + FileReader.LF)
this.trimToSize()
let tempPath = this.path + key
await FileUtils.getInstance().writeNewFileAsync(tempPath, content)
@ -202,7 +166,6 @@ export class DiskLruCache {
if (FileUtils.getInstance().exist(path)) {
let ab: ArrayBuffer = FileUtils.getInstance().readFile(path)
this.putCacheMap(key, ab.byteLength)
FileUtils.getInstance().writeData(this.journalPath, DiskLruCache.READ + ' ' + key + FileReader.LF)
return ab
} else {
return undefined;
@ -223,7 +186,6 @@ export class DiskLruCache {
if (FileUtils.getInstance().exist(path)) {
let ab: ArrayBuffer = await FileUtils.getInstance().readFileAsync(path)
this.putCacheMap(key, ab.byteLength)
await FileUtils.getInstance().writeDataAsync(this.journalPath, DiskLruCache.READ + ' ' + key + FileReader.LF)
return ab
} else {
return undefined;
@ -242,7 +204,6 @@ export class DiskLruCache {
key = SparkMD5.hashBinary(key);
let path = this.path + key;
if (FileUtils.getInstance().exist(path)) {
FileUtils.getInstance().writeData(this.journalPath, DiskLruCache.READ + ' ' + key + FileReader.LF);
return path
} else {
return "";
@ -261,7 +222,6 @@ export class DiskLruCache {
key = SparkMD5.hashBinary(key);
let path = this.path + key;
if (FileUtils.getInstance().exist(path)) {
await FileUtils.getInstance().writeDataAsync(this.journalPath, DiskLruCache.READ + ' ' + key + FileReader.LF);
return path
} else {
return ""
@ -290,7 +250,6 @@ export class DiskLruCache {
let ab = FileUtils.getInstance().readFile(path)
this.size = this.size - ab.byteLength
this.cacheMap.remove(key)
FileUtils.getInstance().writeData(this.journalPath, DiskLruCache.REMOVE + ' ' + key + FileReader.LF)
FileUtils.getInstance().deleteFile(path)
}
return this.cacheMap.get(key) as DiskCacheEntry;
@ -328,65 +287,6 @@ export class DiskLruCache {
return this.size;
}
/**
* Replays one line of the journal file to rebuild in-memory cache state.
*
* Journal line format is "<op> <key>" where op is save/read/remove.
* save/read lines re-register the cached file (if it still exists and is
* non-empty) and are re-appended to the live journal; remove lines evict
* the entry from the map and subtract its size.
*
* @param line a single journal line (LF/CR already stripped by the caller)
*/
private dealWithJournal(line: string) {
let filePath = ''
try {
let lineData = line.split(' ')
if (lineData.length > 1) {
if (lineData[0] != DiskLruCache.REMOVE) {
filePath = this.path + lineData[1]
let fileStat = fileio.statSync(filePath)
// Only count entries whose backing file still exists and has content;
// stale journal lines for deleted/empty files are silently dropped.
if (fileStat.isFile() && fileStat.size > 0) {
this.size = this.size + fileStat.size
FileUtils.getInstance().writeData(this.journalPath, line + FileReader.LF)
this.putCacheMap(lineData[1], fileStat.size)
}
} else {
// REMOVE op: evict from the map and reclaim the tracked byte count.
if (this.cacheMap.hasKey(lineData[1])) {
let cacheEntry: DiskCacheEntry = this.cacheMap.get(lineData[1]) as DiskCacheEntry;
this.size = this.size - cacheEntry.getLength()
this.cacheMap.remove(lineData[1])
}
}
}
} catch (e) {
// statSync throws if the file vanished between journal write and replay;
// best-effort replay: log and continue with the next line.
console.error('DiskLruCache - dealWithJournal e ' + e)
}
}
/**
* Rewrites the journal file from the current in-memory cache map,
* compacting it to one SAVE line per live entry (drops stale
* read/remove history accumulated during replay).
*/
private resetJournalFile() {
FileUtils.getInstance().clearFile(this.journalPath)
for (let key of this.cacheMap.keys()) {
FileUtils.getInstance().writeData(this.journalPath, DiskLruCache.SAVE + ' ' + key + FileReader.LF)
}
}
/**
* Reads the (temporary copy of the) journal file line by line and replays
* each entry via dealWithJournal to reconstruct cache state, then deletes
* the temp journal and trims the cache back under maxSize.
*
* @param path path of the journal backup file to read
*/
private readJournal(path: string) {
let fileReader = new FileReader(path)
let line: string = ''
while (!fileReader.isEnd()) {
line = fileReader.readLine()
// Strip line terminators so dealWithJournal sees a clean "<op> <key>".
line = line.replace(FileReader.LF, '').replace(FileReader.CR, '')
this.dealWithJournal(line)
}
fileReader.close()
// The temp copy is only needed for replay; remove it once consumed.
FileUtils.getInstance().deleteFile(this.journalPathTemp)
// Entries replayed from disk may exceed maxSize; evict LRU overflow now.
this.trimToSize()
}
/**
* 缓存数据map集合
*
@ -413,7 +313,6 @@ export class DiskLruCache {
}
FileUtils.getInstance().deleteFile(this.path + tempKey)
this.cacheMap.remove(tempKey)
FileUtils.getInstance().writeData(this.journalPath, DiskLruCache.REMOVE + ' ' + tempKey + FileReader.LF)
}
}
@ -439,8 +338,6 @@ export class DiskLruCache {
throw new Error('key is null, checking the parameter')
}
key = SparkMD5.hashBinary(key);
FileUtils.getInstance()
.writeData(path + DiskLruCache.journal, DiskLruCache.SAVE + ' ' + key + FileReader.LF);
FileUtils.getInstance().writeNewFile(path + key, value);
return true
}
@ -460,7 +357,6 @@ export class DiskLruCache {
let filepath = path + key;
if (FileUtils.getInstance().exist(filepath)) {
let ab: ArrayBuffer = FileUtils.getInstance().readFile(filepath)
FileUtils.getInstance().writeData(path + DiskLruCache.journal, DiskLruCache.READ + ' ' + key + FileReader.LF)
return ab
} else {
return undefined;

View File

@ -534,7 +534,6 @@ export class ImageKnife {
this.pendingMaps.remove(nextPending.uuid)
this.runningMaps.put(nextPending.uuid, nextPending);
// RequestManager.execute((nextPending as RequestOption), this.memoryCache, this.diskMemoryCache, this.dataFetch, this.resourceFetch)
this.taskpoolLoadResource(nextPending, Constants.MAIN_HOLDER);
}
@ -565,7 +564,6 @@ export class ImageKnife {
let nextPending = pendingTailNode.value;
this.runningMaps.put(nextPending.uuid, nextPending)
this.pendingMaps.remove(nextPending.uuid)
//RequestManager.execute((nextPending as RequestOption), this.memoryCache, this.diskMemoryCache, this.dataFetch, this.resourceFetch)
this.taskpoolLoadResource(nextPending, Constants.MAIN_HOLDER);
}
}
@ -733,13 +731,11 @@ export class ImageKnife {
} else {
if ((typeof (data as PixelMap).isEditable) == 'boolean') {
let imageKnifeData = ImageKnifeData.createImagePixelMap(ImageKnifeType.PIXELMAP, data as PixelMap);
imageKnifeData.needSaveDisk = true;
request.loadComplete(imageKnifeData)
this.memoryCacheProxy.putValue(request.generateCacheKey,imageKnifeData)
this.setDiskCache(request)
} else if ((data as GIFFrame[]).length > 0) {
let imageKnifeData = ImageKnifeData.createImageGIFFrame(ImageKnifeType.GIFFRAME, data as GIFFrame[]);
imageKnifeData.needSaveDisk = true;
request.loadComplete(imageKnifeData)
this.memoryCacheProxy.putValue(request.generateCacheKey,imageKnifeData)
this.setDiskCache(request)

View File

@ -780,10 +780,12 @@ export struct ImageKnifeComponent {
}
/**
 * Clears per-loop GIF animation state.
 *
 * Only GIF images ever start the frame timer, so the timer clear and the
 * loop-timing resets are guarded by isGif — this avoids issuing a
 * clearTimeout and field writes on every non-GIF reset call.
 * (The rendered diff duplicated the unconditional pre-change statements
 * before the guarded block; only the guarded form is kept.)
 */
private resetGifData() {
  if (this.isGif) {
    clearTimeout(this.gifTimerId)
    this.gifLoopDuration = 0;
    this.startGifLoopTime = 0;
    this.endGifLoopTime = 0;
  }
}
/**

View File

@ -96,7 +96,6 @@ export class ImageKnifeData {
static BMP = 'bmp';
static WEBP = 'webp';
waitSaveDisk = false;
needSaveDisk = false;
imageKnifeType: ImageKnifeType | undefined = undefined;
drawPixelMap: DrawPixelMap | undefined = undefined;
drawGIFFrame: DrawGIFFrame | undefined = undefined;

View File

@ -58,6 +58,7 @@ import { DiskCacheProxy } from './requestmanage/DiskCacheProxy'
import { DiskLruCache } from '../cache/DiskLruCache'
import { SparkMD5 } from '../3rd_party/sparkmd5/spark-md5'
import { FileUtils } from '../cache/FileUtils'
import util from '@ohos.util'
export interface Size {
width: number,
@ -182,12 +183,13 @@ export class RequestOption {
this.transformations = array;
}
/**
 * Generates a request UUID.
 *
 * Uses the platform utility util.generateRandomUUID() instead of the old
 * hand-rolled Math.random()-based RFC 4122 v4 template replacement — the
 * native generator is faster and draws on better entropy. The dead
 * commented-out legacy implementation left in the diff is removed.
 *
 * @returns a freshly generated UUID string
 */
generateUUID(): string {
  return util.generateRandomUUID()
}