Commit b224a62c authored by nanahira

Merge branch 'file-relation'

parents 22b39682 d90e2bc4
Pipeline #4936 passed with stages
in 4 minutes
......@@ -35,10 +35,18 @@ export class AdminController {
@Post('migrate/hash')
@ApiOperation({ summary: '迁移 hash' })
@ApiCreatedResponse({ type: BlankReturnMessageDto })
async migrateHash(@UploadedFile('file') file: Express.Multer.File): Promise<BlankReturnMessageDto> {
async migrateHash(): Promise<BlankReturnMessageDto> {
return this.appService.migrateHashes();
}
/*
@Post('migrate/filesField')
@ApiOperation({ summary: '迁移 file 字段' })
@ApiCreatedResponse({ type: BlankReturnMessageDto })
async migrateFilesField(): Promise<BlankReturnMessageDto> {
return this.appService.migrateFilesField();
}*/
@Put('app/:id')
@ApiOperation({ summary: '创建 app' })
@ApiOkResponse({ type: BlankReturnMessageDto })
......
......@@ -16,6 +16,7 @@ import { UpdateController } from './update/update.controller';
import { UpdateService } from './update/update.service';
import { ServeStaticModule } from '@nestjs/serve-static';
import path from 'path';
import { ArchiveFile } from './entities/ArchiveFile.entity';
const configModule = ConfigModule.forRoot();
......@@ -32,7 +33,7 @@ const configModule = ConfigModule.forRoot();
useFactory: async (config: ConfigService) => {
return {
type: 'postgres',
entities: [App, AppHistory, Archive, Build, Depot], // entities here
entities: [App, AppHistory, Archive, Build, Depot, ArchiveFile], // entities here
synchronize: !config.get('DB_NO_INIT'),
host: config.get('DB_HOST'),
port: parseInt(config.get('DB_PORT')) || 5432,
......
......@@ -15,6 +15,8 @@ import { Archive, ArchiveType } from './entities/Archive.entity';
import { PackageS3Service } from './package-s3/package-s3.service';
import axios from 'axios';
import { createHash } from 'crypto';
import { ArchiveFile } from './entities/ArchiveFile.entity';
import _ from 'lodash';
@Injectable()
export class AppService extends ConsoleLogger {
......@@ -294,7 +296,8 @@ export class AppService extends ConsoleLogger {
}
await this.purgeRelatedArchives(build);
await this.db.transaction(async (edb) => {
//await edb.getRepository(Archive).delete({ build });
//const archives = await edb.getRepository(Archive).find({ where: { build } });
//await edb.getRepository(ArchiveFile).delete({ archive: In(archives) });
await edb.getRepository(Build).delete(build);
});
return new BlankReturnMessageDto(200, 'success');
......@@ -340,4 +343,24 @@ export class AppService extends ConsoleLogger {
await this.db.getRepository(Archive).save(archivesToDo);
return new BlankReturnMessageDto(200, 'success');
}
/*
async migrateFilesField() {
const archives = await this.db.getRepository(Archive).find({ select: ['id', 'files'] });
await this.db.transaction(async (edb) => {
for (const a of archives) {
this.log(`Processing archive ${a.id} with ${a.files.length} files.`);
const files = a.files.map((f) => {
const fileEnt = ArchiveFile.fromPath(f);
fileEnt.archive = a;
return fileEnt;
});
await edb.getRepository(ArchiveFile).save(files);
}
});
this.log(`Done.`);
return new BlankReturnMessageDto(200, 'success');
}
*/
}
import { Column, Entity, ManyToOne, PrimaryGeneratedColumn } from 'typeorm';
import { Column, Entity, ManyToOne, OneToMany, PrimaryGeneratedColumn } from 'typeorm';
import { Build } from './Build.entity';
import { Index } from 'typeorm';
import { TimeBase } from './TimeBase.entity';
import { ArchiveFile } from './ArchiveFile.entity';
export enum ArchiveType {
Full = 'full',
......@@ -14,8 +15,11 @@ export class Archive extends TimeBase {
@PrimaryGeneratedColumn({ type: 'int8' })
id: number;
@Column('varchar', { length: 256, array: true })
files: string[];
//@Column('varchar', { length: 256, array: true })
//files: string[];
@OneToMany(() => ArchiveFile, (file) => file.archive, { cascade: true })
containingFiles: ArchiveFile[];
@Index()
@Column('varchar', { length: 128 })
......
import { Entity, Index, ManyToOne, PrimaryColumn } from 'typeorm';
import { Archive } from './Archive.entity';
@Entity()
// Join entity: one row per file path contained in an Archive.
// Composite primary key is (archive, path), so the same path may appear
// in many archives but only once per archive.
export class ArchiveFile {
// Owning archive. Rows are deleted by the database when the parent
// archive row is removed (onDelete: 'CASCADE').
// NOTE(review): `primary: true` on a relation is deprecated in newer
// TypeORM releases — confirm the pinned version still supports it.
@ManyToOne(() => Archive, (a) => a.containingFiles, { primary: true, onDelete: 'CASCADE' })
archive: Archive;
// File path within the archive; indexed because lookups filter by path
// (see the `file.path = any(:requestedFiles)` queries in UpdateService).
@Index()
@PrimaryColumn('varchar', { length: 256 })
path: string;
// Convenience factory: builds an ArchiveFile for `path`.
// Callers must assign `archive` before saving (see AppService usage).
static fromPath(path: string) {
const archiveFile = new ArchiveFile();
archiveFile.path = path;
return archiveFile;
}
}
......@@ -16,6 +16,7 @@ import { Archive, ArchiveType } from '../entities/Archive.entity';
import { Build } from '../entities/Build.entity';
import { AppService } from '../app.service';
import { createHash } from 'crypto';
import { ArchiveFile } from '../entities/ArchiveFile.entity';
export interface FileWithHash {
file: readdirp.EntryInfo;
......@@ -46,7 +47,8 @@ export class ArchiveTask {
const archive = new Archive();
archive.path = this.path;
archive.role = this.role;
archive.files = this.exactFilePaths;
//archive.files = this.exactFilePaths;
archive.containingFiles = this.exactFilePaths.map((filePath) => ArchiveFile.fromPath(filePath));
return archive;
}
......
......@@ -8,6 +8,9 @@ import { BlankReturnMessageDto } from '../dto/ReturnMessage.dto';
import { Archive, ArchiveType } from '../entities/Archive.entity';
import { PackageS3Service } from '../package-s3/package-s3.service';
import _ from 'lodash';
import { ArchiveFile } from '../entities/ArchiveFile.entity';
import * as os from 'os';
import moment from 'moment';
@Injectable()
export class UpdateService extends ConsoleLogger {
......@@ -135,36 +138,92 @@ export class UpdateService extends ConsoleLogger {
}
async getPartPackageMetalink(id: string, depotDto: DepotDto, version: string, requestedFiles: string[]) {
const tryExactArchives = await this.getArchives(
id,
depotDto,
version,
(qb) =>
qb
.andWhere('archive.role != :partRole', { partRole: ArchiveType.Part })
.andWhere(':requestedFiles = archive.files', { requestedFiles: requestedFiles })
// .orderBy('archive.size', 'ASC')
.limit(1),
true
);
const build = await this.getBuild(id, depotDto, version, (qb) => qb.select('build.id'));
//let clock = moment();
//this.log('part 1');
const tryExactArchiveQuery = this.db
.getRepository(Archive)
.createQueryBuilder('archive')
.select(['archive.hash', 'archive.path', 'archive.size'])
.where('archive.buildId = :buildId', { buildId: build.id })
.andWhere('archive.role != :partRole', { partRole: ArchiveType.Part });
/*.addSelect(`array(${qb
.subQuery()
.select('file.path')
.from(ArchiveFile, 'file')
.where('file.archiveId = archive.id')
.getQuery()})`, 'allFiles')*/
tryExactArchiveQuery
.andWhere(
`:requestedFiles = array(${tryExactArchiveQuery
.subQuery()
.select('file.path')
.from(ArchiveFile, 'file')
.where('file.archiveId = archive.id')
.getQuery()})`,
{ requestedFiles: requestedFiles }
)
// .orderBy('archive.size', 'ASC')
.limit(1);
const tryExactArchives = await tryExactArchiveQuery.getMany();
if (tryExactArchives.length) {
return {
cdnUrl: this.cdnUrl,
archives: tryExactArchives,
};
}
const allPartArchives = await this.getArchives(id, depotDto, version, (qb) =>
qb
.andWhere(':requestedFiles && archive.files', { requestedFiles: requestedFiles })
.andWhere('archive.role = :partRole', { partRole: ArchiveType.Part })
);
const [fullArchive] = await this.getArchives(id, depotDto, version, (qb) =>
qb.andWhere('archive.role = :fullRole', { fullRole: ArchiveType.Full })
//this.log(`Time used: ${moment().diff(clock, 'seconds')} s`);
/*clock = moment();
this.log('part ex');
const archiveIds: number[] = (
await this.db
.createQueryBuilder()
.select('distinct(file.archiveId)', 'archiveId')
.from(ArchiveFile, 'file')
.where('file.path = any(:requestedFiles)', { requestedFiles: requestedFiles })
.innerJoin('file.archive', 'archive')
.andWhere('archive.buildId = :buildId', { buildId: build.id })
.getRawMany()
).map((obj) => obj.archiveId);
this.log(`Time used: ${moment().diff(clock, 'seconds')} s`);*/
//clock = moment();
//this.log('part 2');
const allPartArchivesQuery = this.db
.getRepository(Archive)
.createQueryBuilder('archive')
.select(['archive.hash', 'archive.path', 'archive.size'])
.where('archive.buildId = :buildId', { buildId: build.id })
.andWhere('archive.role = :partRole', { partRole: ArchiveType.Part });
//.innerJoin('archive.containingFiles', 'file')
//.andWhere('file.path = any(:requestedFiles)', { requestedFiles: requestedFiles });
allPartArchivesQuery.andWhere(
`archive.id in (${allPartArchivesQuery
.subQuery()
.select('distinct(file.archiveId)')
.from(ArchiveFile, 'file')
.where('file.path = any(:requestedFiles)', { requestedFiles: requestedFiles })
.getQuery()})`
);
const allPartArchives = await allPartArchivesQuery.getMany();
//this.log(`Time used: ${moment().diff(clock, 'seconds')} s`);
//clock = moment();
//this.log('part 3');
const fullArchive = await this.db
.getRepository(Archive)
.createQueryBuilder('archive')
.select(['archive.hash', 'archive.path', 'archive.size'])
.where('archive.buildId = :buildId', { buildId: build.id })
.andWhere('archive.role = :fullRole', { fullRole: ArchiveType.Full })
.limit(1)
.getOne();
let archives = allPartArchives;
if (fullArchive && this.getCostOfArchives([fullArchive]) <= this.getCostOfArchives(allPartArchives)) {
archives = [fullArchive];
}
//this.log(`Time used: ${moment().diff(clock, 'seconds')} s`);
return {
cdnUrl: this.cdnUrl,
archives,
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment