Commit f6959604 authored by nanahira

so far

parent 18cccde2
...@@ -9,6 +9,9 @@ import { AppHistory } from './entities/AppHistory.entity'; ...@@ -9,6 +9,9 @@ import { AppHistory } from './entities/AppHistory.entity';
import { PackagerService } from './packager/packager.service'; import { PackagerService } from './packager/packager.service';
import { AssetsS3Service } from './assets-s3/assets-s3.service'; import { AssetsS3Service } from './assets-s3/assets-s3.service';
import { PackageS3Service } from './package-s3/package-s3.service'; import { PackageS3Service } from './package-s3/package-s3.service';
import { Archive } from './entities/Archive.entity';
import { Build } from './entities/Build.entity';
import { Depot } from './entities/Depot.entity';
const configModule = ConfigModule.forRoot(); const configModule = ConfigModule.forRoot();
...@@ -22,7 +25,7 @@ const configModule = ConfigModule.forRoot(); ...@@ -22,7 +25,7 @@ const configModule = ConfigModule.forRoot();
useFactory: async (config: ConfigService) => { useFactory: async (config: ConfigService) => {
return { return {
type: 'postgres', type: 'postgres',
entities: [App, AppHistory], // entities here entities: [App, AppHistory, Archive, Build, Depot], // entities here
synchronize: !config.get('DB_NO_INIT'), synchronize: !config.get('DB_NO_INIT'),
host: config.get('DB_HOST'), host: config.get('DB_HOST'),
port: parseInt(config.get('DB_PORT')) || 5432, port: parseInt(config.get('DB_PORT')) || 5432,
......
import { Column, Entity, ManyToOne, PrimaryGeneratedColumn } from 'typeorm'; import { Column, Entity, ManyToOne, PrimaryGeneratedColumn } from 'typeorm';
import { Build } from './Build.entity'; import { Build } from './Build.entity';
import { Index } from 'typeorm/browser'; import { Index } from 'typeorm';
import { TimeBase } from './TimeBase.entity';
export enum ArchiveType { export enum ArchiveType {
Full = 'full', Full = 'full',
...@@ -9,7 +10,7 @@ export enum ArchiveType { ...@@ -9,7 +10,7 @@ export enum ArchiveType {
} }
@Entity() @Entity()
export class Archive { export class Archive extends TimeBase {
@PrimaryGeneratedColumn() @PrimaryGeneratedColumn()
id: number; id: number;
......
import { Column, Entity, ManyToOne, OneToMany, PrimaryGeneratedColumn } from 'typeorm'; import { Column, Entity, ManyToOne, OneToMany, PrimaryGeneratedColumn } from 'typeorm';
import { Depot } from './Depot.entity'; import { Depot } from './Depot.entity';
import { Archive } from './Archive.entity'; import { Archive } from './Archive.entity';
import { Index } from 'typeorm/browser'; import { Index } from 'typeorm';
import { TimeBase } from './TimeBase.entity';
@Entity() @Entity()
export class Build { export class Build extends TimeBase {
@PrimaryGeneratedColumn() @PrimaryGeneratedColumn()
id: number; id: number;
......
import { Column, Entity, Index, ManyToOne, OneToMany, PrimaryGeneratedColumn } from 'typeorm'; import { Column, Entity, Index, ManyToOne, OneToMany, PrimaryGeneratedColumn } from 'typeorm';
import { App } from './App.entity'; import { App } from './App.entity';
import { Build } from './Build.entity'; import { Build } from './Build.entity';
import { TimeBase } from './TimeBase.entity';
@Index((d) => [d.app, d.locale, d.platform.d.arch], { unique: true }) @Index((d) => [d.app, d.locale, d.platform, d.arch], { unique: true })
@Entity() @Entity()
export class Depot { export class Depot extends TimeBase {
@PrimaryGeneratedColumn() @PrimaryGeneratedColumn()
id: number; id: number;
......
...@@ -20,6 +20,35 @@ export interface FileWithHash { ...@@ -20,6 +20,35 @@ export interface FileWithHash {
hash: string; hash: string;
} }
export class ArchiveTask {
readonly path: string;
constructor(public readonly role: ArchiveType, public readonly files: FileWithHash[], public readonly altFiles?: string[]) {
this.path =
createHash('sha512')
.update(files.map((f) => `${f.file.path}${f.hash}`).join(''))
.digest('hex') + '.tar.gz';
}
get filePaths() {
return this.altFiles || this.files.map((f) => f.file.path);
}
get archive() {
const archive = new Archive();
archive.path = this.path;
archive.role = this.role;
archive.files = this.filePaths;
return archive;
}
addToTask(archiveTasks: ArchiveTask[]) {
if (archiveTasks.some((t) => t.path === this.path)) {
return;
}
archiveTasks.push(this);
}
}
@Injectable() @Injectable()
export class PackagerService extends ConsoleLogger { export class PackagerService extends ConsoleLogger {
bucket_max = 10 * 1024 ** 2; bucket_max = 10 * 1024 ** 2;
...@@ -55,13 +84,20 @@ export class PackagerService extends ConsoleLogger { ...@@ -55,13 +84,20 @@ export class PackagerService extends ConsoleLogger {
directories.map((d) => d.path), directories.map((d) => d.path),
files.map((f) => f.path) files.map((f) => f.path)
); );
const promises: Promise<Archive>[] = []; const archiveTasks: ArchiveTask[] = [];
const filesWithHash: FileWithHash[] = files.map((f) => ({ file: f, hash: checksum[f.path] })); const filesWithHash: FileWithHash[] = files.map((f) => ({ file: f, hash: checksum[f.path] }));
// 整包 // 整包
//packagesSequence.push(files.map((f) => f.path)); new ArchiveTask(ArchiveType.Full, filesWithHash, await fs.promises.readdir(root)).addToTask(archiveTasks);
promises.push(this.archive(ArchiveType.Full, root, tarballRoot, filesWithHash, await fs.promises.readdir(root)));
// 更新包
for (const lastChecksum of lastBuildChecksums) {
const changedFiles = filesWithHash.filter((f) => !lastChecksum[f.file.path] || lastChecksum[f.file.path] !== f.hash);
if (changedFiles.length) {
new ArchiveTask(ArchiveType.Update, changedFiles).addToTask(archiveTasks);
}
}
// 散包 // 散包
const buckets: Record<string, [FileWithHash[], number]> = {}; const buckets: Record<string, [FileWithHash[], number]> = {};
...@@ -71,8 +107,7 @@ export class PackagerService extends ConsoleLogger { ...@@ -71,8 +107,7 @@ export class PackagerService extends ConsoleLogger {
buckets[extname] ??= [[], 0]; buckets[extname] ??= [[], 0];
const bucket = buckets[extname]; const bucket = buckets[extname];
if (bucket[1] + file.file.stats.size >= this.bucket_max) { if (bucket[1] + file.file.stats.size >= this.bucket_max) {
//packagesSequence.push(bucket[0].map((f) => f.file.path)); new ArchiveTask(ArchiveType.Part, bucket[0]).addToTask(archiveTasks);
promises.push(this.archive(ArchiveType.Full, root, tarballRoot, bucket[0]));
bucket[0] = []; bucket[0] = [];
bucket[1] = 0; bucket[1] = 0;
} else { } else {
...@@ -80,26 +115,16 @@ export class PackagerService extends ConsoleLogger { ...@@ -80,26 +115,16 @@ export class PackagerService extends ConsoleLogger {
bucket[1] += file.file.stats.size; bucket[1] += file.file.stats.size;
} }
} else { } else {
//packagesSequence.push([file.file.path]); new ArchiveTask(ArchiveType.Part, [file]).addToTask(archiveTasks);
promises.push(this.archive(ArchiveType.Part, root, tarballRoot, [file]));
} }
} }
for (const bucket of Object.values(buckets)) { for (const bucket of Object.values(buckets)) {
if (bucket[0].length) { if (bucket[0].length) {
//packagesSequence.push(bucket[0].map((f) => f.file.path)); new ArchiveTask(ArchiveType.Part, bucket[0]).addToTask(archiveTasks);
promises.push(this.archive(ArchiveType.Part, root, tarballRoot, bucket[0]));
}
}
// 更新包
for (const lastChecksum of lastBuildChecksums) {
const changedFiles = filesWithHash.filter((f) => !lastChecksum[f.file.path] || lastChecksum[f.file.path] !== f.hash);
if (changedFiles.length) {
promises.push(this.archive(ArchiveType.Update, root, tarballRoot, changedFiles));
} }
} }
const packages = await Promise.all(promises); // 这个 await 过后,checksum 和 打包上传都已经跑完了 const packages = await Promise.all(archiveTasks.map((t) => this.archive(root, tarballRoot, t))); // 这个 await 过后,checksum 和 打包上传都已经跑完了
//for (let i = 0; i < packagesSequence.length; ++i) { //for (let i = 0; i < packagesSequence.length; ++i) {
// packages[gotPackages[i]] = packagesSequence[i]; // packages[gotPackages[i]] = packagesSequence[i];
...@@ -120,24 +145,18 @@ export class PackagerService extends ConsoleLogger { ...@@ -120,24 +145,18 @@ export class PackagerService extends ConsoleLogger {
]); ]);
} }
async archive(role: ArchiveType, root: string, tarballRoot: string, files: FileWithHash[], altFiles?: string[]): Promise<Archive> { async archive(root: string, tarballRoot: string, archiveTask: ArchiveTask): Promise<Archive> {
const archiveName = const archive = archiveTask.archive;
createHash('sha512') const archiveName = archiveTask.path;
.update(files.map((f) => `${f.file.path}${f.hash}`).join(''))
.digest('hex') + '.tar.gz';
const filePaths = files.map((f) => f.file.path);
const archive = new Archive();
archive.path = archiveName;
archive.role = role;
archive.files = filePaths;
const existing = await this.s3.fileExists(archiveName); const existing = await this.s3.fileExists(archiveName);
if (existing) { if (existing) {
archive.size = existing.Size; archive.size = existing.Size;
return archive; return archive;
} }
const files = archiveTask.filePaths;
const archivePath = path.join(tarballRoot, archiveName); const archivePath = path.join(tarballRoot, archiveName);
this.log(`Packaging archive ${archivePath} with ${files.length} files.`); this.log(`Packaging archive ${archivePath} with ${files.length} files.`);
await this.spawnAsync('tar', ['-zcvf', archivePath].concat(altFiles || filePaths), { await this.spawnAsync('tar', ['-zcvf', archivePath].concat(files), {
cwd: root, cwd: root,
}); });
const fileSize = (await fs.promises.stat(archivePath)).size; const fileSize = (await fs.promises.stat(archivePath)).size;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment