Commit 9291d82e authored by nanahira

add hash cache

parent 413b50b1
Pipeline #5390 passed with stages in 6 minutes and 2 seconds
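
For context on the new dependency: the diff below introduces an in-memory expiring cache for archive hashes built on @cityssm/map-expire. A minimal sketch of how this commit uses that Cache class follows (set() takes a TTL in milliseconds; get() appears to return undefined for missing or expired entries, which is why the service code only truth-checks the result). The key and value literals here are placeholders, not taken from the repository.

import { Cache } from '@cityssm/map-expire';

// Expiring map from archive path to content hash, as introduced in this commit.
const hashCache = new Cache<string, string>();

// Store a value with a 24-hour time-to-live (the TTL used in the diff).
hashCache.set('/path/to/archive.zip', 'abc123', 24 * 60 * 60 * 1000);

// Returns the cached value while fresh; a falsy result once the entry has expired.
const cached = hashCache.get('/path/to/archive.zip');
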
@@ -10,6 +10,7 @@
       "dependencies": {
         "@aws-sdk/client-s3": "^3.26.0",
         "@aws-sdk/lib-storage": "^3.26.0",
+        "@cityssm/map-expire": "^1.1.1",
         "@nestjs/cli": "^8.0.0",
         "@nestjs/common": "^8.0.0",
         "@nestjs/config": "^1.0.1",
@@ -1918,6 +1919,11 @@
       "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==",
       "dev": true
     },
+    "node_modules/@cityssm/map-expire": {
+      "version": "1.1.1",
+      "resolved": "https://registry.npmjs.org/@cityssm/map-expire/-/map-expire-1.1.1.tgz",
+      "integrity": "sha512-eDOZR3XKkzu1fzUtr96LZFrqhbIVwrW3tTbbQB/C2F1Mj7eVdH5HFj93WyrWSV/kQJpj1orQUwrl/A2F8nUGRQ=="
+    },
     "node_modules/@cspotcode/source-map-consumer": {
       "version": "0.8.0",
       "resolved": "https://registry.npmjs.org/@cspotcode/source-map-consumer/-/source-map-consumer-0.8.0.tgz",
@@ -12937,6 +12943,11 @@
       "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==",
       "dev": true
     },
+    "@cityssm/map-expire": {
+      "version": "1.1.1",
+      "resolved": "https://registry.npmjs.org/@cityssm/map-expire/-/map-expire-1.1.1.tgz",
+      "integrity": "sha512-eDOZR3XKkzu1fzUtr96LZFrqhbIVwrW3tTbbQB/C2F1Mj7eVdH5HFj93WyrWSV/kQJpj1orQUwrl/A2F8nUGRQ=="
+    },
     "@cspotcode/source-map-consumer": {
       "version": "0.8.0",
       "resolved": "https://registry.npmjs.org/@cspotcode/source-map-consumer/-/source-map-consumer-0.8.0.tgz",
@@ -23,6 +23,7 @@
   "dependencies": {
     "@aws-sdk/client-s3": "^3.26.0",
     "@aws-sdk/lib-storage": "^3.26.0",
+    "@cityssm/map-expire": "^1.1.1",
     "@nestjs/cli": "^8.0.0",
     "@nestjs/common": "^8.0.0",
     "@nestjs/config": "^1.0.1",
@@ -15,6 +15,7 @@ import { Archive, ArchiveType } from '../entities/Archive.entity';
 import { AppService } from '../app.service';
 import { createHash } from 'crypto';
 import delay from 'delay';
+import { Cache } from '@cityssm/map-expire';
 
 export interface FileWithHash {
   file: readdirp.EntryInfo;
@@ -64,7 +65,8 @@ export class PackagerService extends ConsoleLogger {
   bucket_enter = 1 * 1024 ** 2;
   packagerWorkingDirectory: string;
 
-  uploadLock = new Set<string>();
+  private uploadLock = new Set<string>();
+  private hashCache = new Cache<string, string>();
 
   constructor(
     @Inject(forwardRef(() => AppService)) private readonly appService: AppService,
@@ -199,12 +201,26 @@ export class PackagerService extends ConsoleLogger {
     ]);
   }
 
+  private async lookForExistingArchiveHash(path: string) {
+    let hash = this.hashCache.get(path);
+    if (hash) {
+      return hash;
+    }
+    hash = await this.appService.lookForExistingArchiveHash(path);
+    if (hash) {
+      this.hashCache.set(path, hash, 24 * 60 * 60 * 1000);
+      return hash;
+    }
+    return null;
+  }
+
   async archive(root: string, archiveTask: ArchiveTask): Promise<Archive> {
     const archive = archiveTask.archive;
     const archiveName = archiveTask.archiveFullPath;
+    await this.waitForLock(archiveTask.path);
     const existing = await this.s3.fileExists(archiveName);
     if (existing) {
-      const hash = await this.appService.lookForExistingArchiveHash(archiveTask.path);
+      const hash = await this.lookForExistingArchiveHash(archiveTask.path);
       if (hash) {
         this.log(`Archive ${archiveName} exists, skipping.`);
         archive.size = existing.Size;
@@ -212,7 +228,6 @@
         return archive;
       }
     }
-    await this.waitForLock(archiveTask.path);
     const files = archiveTask.filePaths;
     this.log(`Packaging archive ${archiveName} with ${archiveTask.exactFilePaths.length} files.`);
     try {
@@ -245,6 +260,7 @@
       });
       const [, { object }] = await Promise.all([childPromise, uploadPromise]);
       archive.hash = hashObject.digest('hex');
+      this.hashCache.set(archive.path, archive.hash, 24 * 60 * 60 * 1000);
       archive.size = object.Size;
     } catch (e) {
       throw e;
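
As a side note (not part of the commit), the look-aside pattern implemented by lookForExistingArchiveHash() above can be expressed as a small generic helper around the same Cache class. The helper name and signature below are hypothetical, offered only to illustrate the pattern.

import { Cache } from '@cityssm/map-expire';

// Hypothetical helper (illustration only): memoize an async lookup with a TTL,
// caching only non-null results, mirroring lookForExistingArchiveHash() above.
export function memoizeWithTtl<V>(
  loader: (key: string) => Promise<V | null>,
  ttlMilliseconds: number,
): (key: string) => Promise<V | null> {
  const cache = new Cache<string, V>();
  return async (key: string) => {
    const cached = cache.get(key);
    if (cached != null) {
      return cached; // fresh entry, skip the expensive lookup
    }
    const value = await loader(key);
    if (value !== null) {
      cache.set(key, value, ttlMilliseconds); // cache successful lookups only
    }
    return value;
  };
}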