Commit 18cccde2 authored by nanahira

use entity and update package

parent b3c7fabe
+import { Archive } from '../entities/Archive.entity';
 export class PackageResult {
-  constructor(public checksum: Record<string, string>, public packages: Record<string, string[]>, public fullPackage: string) {}
+  constructor(public checksum: Record<string, string>, public archives: Archive[]) {}
 }
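PackageResult now carries Archive entities rather than a path-to-files map plus a separate full-package path. A minimal sketch of how a caller could recover the old shape, assuming Archive exposes the `path`, `role` and `files` fields that the packager assigns further down in this commit (import paths here are hypothetical):

```ts
import { Archive, ArchiveType } from '../entities/Archive.entity';
import { PackageResult } from './PackageResult'; // hypothetical module path for the class above

// Sketch only: rebuild the old { archivePath: files[] } map and the old
// fullPackage path from the entity-based result.
function toLegacyShape(result: PackageResult) {
  const packages: Record<string, string[]> = {};
  for (const archive of result.archives) {
    packages[archive.path] = archive.files;
  }
  const fullPackage = result.archives.find((a) => a.role === ArchiveType.Full)?.path;
  return { checksum: result.checksum, packages, fullPackage };
}
```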
@@ -20,6 +20,9 @@ export class Archive {
   @Column('varchar', { length: 140 })
   path: string;
+  @Column('int', { unsigned: true })
+  size: number;
   @ManyToOne((type) => Build, (build) => build.archives)
   build: Build;
...
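For orientation, the entity this hunk touches looks roughly like the sketch below. Only `path`, the new `size` column and the `build` relation appear verbatim above; the `id` column, the `role`/`files` fields and the ArchiveType values are assumptions based on how the packager uses them later in this commit.

```ts
import { Column, Entity, ManyToOne, PrimaryGeneratedColumn } from 'typeorm';
import { Build } from './Build.entity';

// Assumed values; only the names Full, Part and Update are used in this commit.
export enum ArchiveType {
  Full = 'full',
  Part = 'part',
  Update = 'update',
}

@Entity()
export class Archive {
  @PrimaryGeneratedColumn()
  id: number;

  @Column('varchar', { length: 140 })
  path: string;

  @Column('int', { unsigned: true })
  size: number;

  // The packager assigns these two fields, but their column decorators are
  // outside this hunk, so they are left undecorated in this sketch.
  role: ArchiveType;
  files: string[];

  @ManyToOne((type) => Build, (build) => build.archives)
  build: Build;
}
```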
+import { MyCardAppMaintainerGuard } from './my-card-app-maintainer.guard';
+describe('MyCardAppMaintainerGuard', () => {
+  it('should be defined', () => {
+    expect(new MyCardAppMaintainerGuard()).toBeDefined();
+  });
+});
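The guard itself is not part of this diff; the spec only asserts that it can be constructed with no arguments. A hedged skeleton consistent with that, using the standard NestJS CanActivate interface (the actual maintainer check is not shown anywhere in this commit and is only a placeholder here):

```ts
import { CanActivate, ExecutionContext, Injectable } from '@nestjs/common';

@Injectable()
export class MyCardAppMaintainerGuard implements CanActivate {
  canActivate(context: ExecutionContext): boolean {
    const request = context.switchToHttp().getRequest();
    // Placeholder: the real check for MyCard app maintainer permissions is
    // not part of this commit.
    return request != null;
  }
}
```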
@@ -13,6 +13,7 @@ import readdirp from 'readdirp';
 import { ConsoleLogger, Injectable } from '@nestjs/common';
 import { createHash } from 'crypto';
+import { Archive, ArchiveType } from '../entities/Archive.entity';
 export interface FileWithHash {
   file: readdirp.EntryInfo;
@@ -30,7 +31,7 @@ export class PackagerService extends ConsoleLogger {
     this.bucket_enter = (parseInt(config.get('PACKAGE_BUCKET_ENTER')) || 1) * 1024 ** 2;
   }
-  async build(stream: internal.Readable, pathPrefix?: string): Promise<PackageResult> {
+  async build(stream: internal.Readable, pathPrefix?: string, lastBuildChecksums: Record<string, string>[] = []): Promise<PackageResult> {
     this.log(`Start packaging.`);
     const root = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'mycard-console-'));
     const tarballRoot = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'mycard-console-tarball-'));
@@ -44,7 +45,7 @@ export class PackagerService extends ConsoleLogger {
     this.log(`Package extracted to ${extractRoot}.`);
-    const packagesSequence: string[][] = [];
+    //const packagesSequence: string[][] = [];
     const entries = await readdirp.promise(root, { alwaysStat: true, type: 'files_directories' });
     const [directories, files] = _.partition(entries, (item) => item.stats.isDirectory());
@@ -54,13 +55,13 @@ export class PackagerService extends ConsoleLogger {
       directories.map((d) => d.path),
       files.map((f) => f.path)
     );
-    const promises: Promise<string>[] = [];
+    const promises: Promise<Archive>[] = [];
     const filesWithHash: FileWithHash[] = files.map((f) => ({ file: f, hash: checksum[f.path] }));
     // full package
-    packagesSequence.push(files.map((f) => f.path));
-    promises.push(this.archive(root, tarballRoot, filesWithHash, await fs.promises.readdir(root)));
+    //packagesSequence.push(files.map((f) => f.path));
+    promises.push(this.archive(ArchiveType.Full, root, tarballRoot, filesWithHash, await fs.promises.readdir(root)));
     // part packages
     const buckets: Record<string, [FileWithHash[], number]> = {};
@@ -70,8 +71,8 @@ export class PackagerService extends ConsoleLogger {
         buckets[extname] ??= [[], 0];
         const bucket = buckets[extname];
         if (bucket[1] + file.file.stats.size >= this.bucket_max) {
-          packagesSequence.push(bucket[0].map((f) => f.file.path));
-          promises.push(this.archive(root, tarballRoot, bucket[0]));
+          //packagesSequence.push(bucket[0].map((f) => f.file.path));
+          promises.push(this.archive(ArchiveType.Full, root, tarballRoot, bucket[0]));
           bucket[0] = [];
           bucket[1] = 0;
         } else {
@@ -79,30 +80,35 @@ export class PackagerService extends ConsoleLogger {
           bucket[1] += file.file.stats.size;
         }
       } else {
-        packagesSequence.push([file.file.path]);
-        promises.push(this.archive(root, tarballRoot, [file]));
+        //packagesSequence.push([file.file.path]);
+        promises.push(this.archive(ArchiveType.Part, root, tarballRoot, [file]));
       }
     }
     for (const bucket of Object.values(buckets)) {
       if (bucket[0].length) {
-        packagesSequence.push(bucket[0].map((f) => f.file.path));
-        promises.push(this.archive(root, tarballRoot, bucket[0]));
+        //packagesSequence.push(bucket[0].map((f) => f.file.path));
+        promises.push(this.archive(ArchiveType.Part, root, tarballRoot, bucket[0]));
       }
     }
-    // TODO: update packages
+    // update packages
+    for (const lastChecksum of lastBuildChecksums) {
+      const changedFiles = filesWithHash.filter((f) => !lastChecksum[f.file.path] || lastChecksum[f.file.path] !== f.hash);
+      if (changedFiles.length) {
+        promises.push(this.archive(ArchiveType.Update, root, tarballRoot, changedFiles));
+      }
+    }
-    const gotPackages = await Promise.all(promises); // after this await, the checksums and the archive packing/uploads have all finished
-    const packages: Record<string, string[]> = {};
-    for (let i = 0; i < packagesSequence.length; ++i) {
-      packages[gotPackages[i]] = packagesSequence[i];
-    }
+    const packages = await Promise.all(promises); // after this await, the checksums and the archive packing/uploads have all finished
+    //for (let i = 0; i < packagesSequence.length; ++i) {
+    //  packages[gotPackages[i]] = packagesSequence[i];
+    //}
     // this.log({ checksum, packages });
     await fs.promises.rm(root, { recursive: true });
     await fs.promises.rm(tarballRoot, { recursive: true });
-    return new PackageResult(checksum, packages, gotPackages[0]);
+    return new PackageResult(checksum, packages);
   }
   async checksum(root: string, directories: string[], files: string[]): Promise<Record<string, string>> {
@@ -114,21 +120,32 @@ export class PackagerService extends ConsoleLogger {
     ]);
   }
-  async archive(root: string, tarballRoot: string, files: FileWithHash[], altFiles?: string[]): Promise<string> {
-    const archive =
+  async archive(role: ArchiveType, root: string, tarballRoot: string, files: FileWithHash[], altFiles?: string[]): Promise<Archive> {
+    const archiveName =
       createHash('sha512')
         .update(files.map((f) => `${f.file.path}${f.hash}`).join(''))
         .digest('hex') + '.tar.gz';
-    if (await this.s3.fileExists(archive)) {
+    const filePaths = files.map((f) => f.file.path);
+    const archive = new Archive();
+    archive.path = archiveName;
+    archive.role = role;
+    archive.files = filePaths;
+    const existing = await this.s3.fileExists(archiveName);
+    if (existing) {
+      archive.size = existing.Size;
       return archive;
     }
-    const archivePath = path.join(tarballRoot, archive);
+    const archivePath = path.join(tarballRoot, archiveName);
     this.log(`Packaging archive ${archivePath} with ${files.length} files.`);
-    await this.spawnAsync('tar', ['-zcvf', archivePath].concat(altFiles || files.map((f) => f.file.path)), {
+    await this.spawnAsync('tar', ['-zcvf', archivePath].concat(altFiles || filePaths), {
       cwd: root,
     });
     const fileSize = (await fs.promises.stat(archivePath)).size;
-    await this.s3.uploadFile(archive, fs.createReadStream(archivePath), { ContentType: 'application/tar+gzip', ContentLength: fileSize });
+    await this.s3.uploadFile(archiveName, fs.createReadStream(archivePath), {
+      ContentType: 'application/tar+gzip',
+      ContentLength: fileSize,
+    });
+    archive.size = fileSize;
     return archive;
   }
...
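The new `lastBuildChecksums` parameter is what drives the update archives: for each previous build's checksum map, every file whose hash is new or different gets packed into an ArchiveType.Update tarball. A hedged usage sketch, assuming the caller keeps earlier PackageResults around; the `previousResults` source and the import paths are assumptions, only the `build()` signature comes from this diff:

```ts
import internal from 'stream';
import { PackagerService } from './packager.service'; // assumed file name
import { PackageResult } from './PackageResult'; // hypothetical module path

// Sketch only: produce full, part and update archives in one call by feeding
// the checksum maps of earlier builds into the new third parameter.
async function buildWithUpdates(
  packager: PackagerService,
  tarStream: internal.Readable,
  previousResults: PackageResult[]
): Promise<PackageResult> {
  const lastBuildChecksums = previousResults.map((r) => r.checksum);
  // One ArchiveType.Update archive is created per previous build that has
  // at least one changed or newly added file.
  return packager.build(tarStream, undefined, lastBuildChecksums);
}
```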
@@ -42,7 +42,7 @@ export class S3Service extends ConsoleLogger {
   async fileExists(path: string) {
     const objects = await this.listObjects(path);
     // this.log(objects);
-    return objects.Contents && objects.Contents.some((obj) => obj.Key === this.getPathWithPrefix(path));
+    return objects.Contents ? objects.Contents.find((obj) => obj.Key === this.getPathWithPrefix(path)) : null;
   }
   private getPathWithPrefix(filename: string) {
...
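fileExists() now returns the matching object summary (or null) instead of a boolean, which is what lets the packager above read `existing.Size` for archives that were already uploaded. Truthiness-style call sites keep working. A small sketch, assuming `listObjects` wraps the AWS S3 ListObjects call whose `Contents` entries expose `Key` and `Size` (the import path and key are hypothetical):

```ts
import { S3Service } from './s3.service';

// Sketch only: the return value still works in a truthy check, but now also
// carries the S3 object metadata when the file exists.
async function logArchiveStatus(s3: S3Service, key: string) {
  const existing = await s3.fileExists(key);
  if (existing) {
    console.log(`${key} already uploaded: ${existing.Size} bytes`);
  } else {
    console.log(`${key} not uploaded yet`);
  }
}
```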