Commit 3cdb69f7 authored by nanahira's avatar nanahira

fix file leak when flushing part-package buckets

parent 1b149b8c
Pipeline #15331 passed with stages
in 2 minutes and 58 seconds
...@@ -18,12 +18,12 @@ import PQueue from 'p-queue'; ...@@ -18,12 +18,12 @@ import PQueue from 'p-queue';
import { LockService } from 'src/lock/lock.service'; import { LockService } from 'src/lock/lock.service';
import { InjectRedis, Redis } from '@nestjs-modules/ioredis'; import { InjectRedis, Redis } from '@nestjs-modules/ioredis';
export interface FileWithHash { interface FileWithHash {
file: readdirp.EntryInfo; file: readdirp.EntryInfo;
hash: string; hash: string;
} }
export class ArchiveTask { class ArchiveTask {
readonly path: string; readonly path: string;
constructor(public readonly role: ArchiveType, public readonly files: FileWithHash[], public readonly altFiles?: string[]) { constructor(public readonly role: ArchiveType, public readonly files: FileWithHash[], public readonly altFiles?: string[]) {
this.path = createHash('sha512') this.path = createHash('sha512')
...@@ -60,6 +60,21 @@ export class ArchiveTask { ...@@ -60,6 +60,21 @@ export class ArchiveTask {
} }
} }
/**
 * Accumulator that gathers small files into one part-archive batch.
 * `size` mirrors the summed on-disk byte size of everything in `files`.
 */
class Bucket {
  files: FileWithHash[] = [];
  size = 0;

  /**
   * Reset to a fresh, empty state. Assigns a brand-new array (rather than
   * truncating in place) so a `files` reference handed out earlier — e.g.
   * to an ArchiveTask — keeps its contents.
   */
  empty() {
    this.size = 0;
    this.files = [];
  }

  /** Track one file and fold its stat size into the running total. */
  addFile(file: FileWithHash) {
    this.size += file.file.stats.size;
    this.files.push(file);
  }
}
@Injectable() @Injectable()
export class PackagerService extends ConsoleLogger { export class PackagerService extends ConsoleLogger {
bucket_max = 10 * 1024 ** 2; bucket_max = 10 * 1024 ** 2;
...@@ -122,7 +137,7 @@ export class PackagerService extends ConsoleLogger { ...@@ -122,7 +137,7 @@ export class PackagerService extends ConsoleLogger {
); );
const archiveTasks: ArchiveTask[] = []; const archiveTasks: ArchiveTask[] = [];
const filesWithHash: FileWithHash[] = files.map((f) => ({ file: f, hash: checksum[f.path] })); const filesWithHash: FileWithHash[] = files.map((file) => ({ file, hash: checksum[file.path] }));
// 整包 // 整包
new ArchiveTask(ArchiveType.Full, filesWithHash, await fs.promises.readdir(root)).addToTask(archiveTasks); new ArchiveTask(ArchiveType.Full, filesWithHash, await fs.promises.readdir(root)).addToTask(archiveTasks);
...@@ -138,27 +153,24 @@ export class PackagerService extends ConsoleLogger { ...@@ -138,27 +153,24 @@ export class PackagerService extends ConsoleLogger {
const pendingPartTasks: ArchiveTask[] = []; const pendingPartTasks: ArchiveTask[] = [];
// 散包 // 散包
const buckets: Record<string, [FileWithHash[], number]> = {}; const buckets: Record<string, Bucket> = {};
for (const file of filesWithHash) { for (const file of filesWithHash) {
const extname = path.extname(file.file.basename); const extname = path.extname(file.file.basename);
if (file.file.stats.size < this.bucket_enter && !this.noGatherExts.has(extname)) { if (file.file.stats.size < this.bucket_enter && !this.noGatherExts.has(extname)) {
buckets[extname] ??= [[], 0]; buckets[extname] ??= new Bucket();
const bucket = buckets[extname]; const bucket = buckets[extname];
if (bucket[1] + file.file.stats.size >= this.bucket_max) { if (bucket.size + file.file.stats.size >= this.bucket_max) {
new ArchiveTask(ArchiveType.Part, bucket[0]).addToTask(pendingPartTasks, true); new ArchiveTask(ArchiveType.Part, bucket.files).addToTask(pendingPartTasks, true);
bucket[0] = []; bucket.empty();
bucket[1] = 0;
} else {
bucket[0].push(file);
bucket[1] += file.file.stats.size;
} }
bucket.addFile(file);
} else { } else {
new ArchiveTask(ArchiveType.Part, [file]).addToTask(pendingPartTasks, true); new ArchiveTask(ArchiveType.Part, [file]).addToTask(pendingPartTasks, true);
} }
} }
for (const bucket of Object.values(buckets)) { for (const bucket of Object.values(buckets)) {
if (bucket[0].length) { if (bucket.files.length) {
new ArchiveTask(ArchiveType.Part, bucket[0]).addToTask(pendingPartTasks, true); new ArchiveTask(ArchiveType.Part, bucket.files).addToTask(pendingPartTasks, true);
} }
} }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment