This repository has been archived by the owner on Dec 16, 2023. It is now read-only.

Fix dpkg close stream and github update
Sirherobrine23 committed Apr 17, 2023
1 parent d65c234 commit 24b68f6
Showing 10 changed files with 517 additions and 526 deletions.
7 changes: 2 additions & 5 deletions .github/workflows/publishNpm.yaml
@@ -28,17 +28,14 @@ jobs:
- name: Edit version
run: npm run update_version -- ${{ github.event.release.tag_name }} --env_file="$GITHUB_ENV"

- name: Build package
run: npm run build
- name: Pack package
run: npm pack --workspaces

- name: Publish package
run: npm publish --workspaces --access public --tag ${{ github.event.release.prerelease && 'next' || 'latest' }}
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

- name: Pack package
run: npm pack --workspaces

- name: Upload package
uses: actions/upload-artifact@v3
with:
252 changes: 211 additions & 41 deletions packages/ar/src/index.ts
@@ -1,13 +1,9 @@
export * as localFile from "./local.js";
export * as local from "./local.js";
import oldFs, { promises as fs } from "node:fs";
import { extendsFS } from "@sirherobrine23/extends";
import { finished } from "node:stream/promises";
import { format } from "node:util";
import stream from "node:stream";
import path from "node:path";
import { createHead } from "./local.js";
export {createHead};

export type arHeader = {
name: string,
@@ -18,6 +14,40 @@ export type arHeader = {
size: number,
};

export type fileInfo = {size: number, mtime?: Date|string, mode?: string, owner?: number, group?: number}
export function createHead(filename: string, info: fileInfo) {
info.mtime ||= new Date();
info.mode ||= "644";
info.owner ||= 0;
if (info.owner < 0) info.owner = 0;
info.group ||= 0;
if (info.group < 0) info.group = 0;
const controlHead = Buffer.alloc(60, 0x20);

// Filename
controlHead.write(path.basename(filename), 0, 16, "ascii");

// Timestamp
if (info.mtime instanceof Date) controlHead.write((info.mtime.getTime()/1000).toFixed(), 16, 12); else controlHead.write(info.mtime, 16, 12);

// Owner ID
controlHead.write(info.owner.toString(), 28, 6);

// Group ID
controlHead.write(info.group.toString(), 34, 6);

// File mode
controlHead.write(info.mode, 40, 8);

// File size
controlHead.write(info.size.toString(), 48, 10);

// ending
controlHead[58] = 0x60; controlHead[59] = 0x0A;
return controlHead;
}
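
For reference, createHead packs the classic 60-byte ar entry header: a 16-byte name, a 12-byte mtime in Unix seconds, 6 bytes each for owner and group, an 8-byte mode, a 10-byte size, and the 2-byte "`\n" terminator. A minimal sketch of calling it, assuming the package is imported as @sirherobrine23/ar (the published name is an assumption; inside this file it would simply be a local call):

import { createHead } from "@sirherobrine23/ar"; // assumed package name

// Header for a 4-byte "debian-binary" member ("2.0\n").
const head = createHead("debian-binary", {
  size: 4,
  mtime: new Date(), // written as Unix seconds
  mode: "100644",    // octal mode string
  owner: 0,
  group: 0,
});
console.log(head.byteLength);                     // 60
console.log(head.subarray(58).toString("ascii")); // "`\n" terminator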


export declare interface arParse extends stream.Writable {
on(event: "close", listener: () => void): this;
on(event: "drain", listener: () => void): this;
@@ -38,7 +68,6 @@ export declare interface arParse extends stream.Writable {
once(event: string | symbol, listener: (...args: any[]) => void): this;
}

export default parse;
/**
* Parse an ar archive and emit a readable stream for each entry via the "entry" event
*
@@ -51,7 +80,7 @@ export default parse;
});
*
*/
export function parse(): arParse {
export function parseArStream(): arParse {
let initialHead = true, oldBuffer: Buffer, fileStream: stream.Readable, fileStreamSize: number;
return new stream.Writable({
defaultEncoding: "binary",
@@ -163,47 +192,188 @@ export function parse(): arParse {
});
}
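
A short usage sketch for parseArStream. The "entry" event payload is hidden in the collapsed hunk above, so the (header, fileStream) signature below is an assumption taken from the JSDoc, and example.deb is a hypothetical input:

import { createReadStream } from "node:fs";
import { parseArStream } from "@sirherobrine23/ar"; // assumed package name

const parser = parseArStream();
// Assumed payload: the parsed ar header plus a readable stream with the member data.
parser.on("entry", (header, fileStream) => {
  console.log("member %s (%d bytes)", header.name, header.size);
  fileStream.resume(); // drain the member so parsing can continue
});
createReadStream("example.deb").pipe(parser);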

export class arStream extends stream.Readable {
constructor(onRedable?: (this: arStream) => void) {
super({autoDestroy: true, read(){}});
this.push(Buffer.from([0x21, 0x3C, 0x61, 0x72, 0x63, 0x68, 0x3E, 0x0A]));
Promise.resolve().then(() => onRedable.call(this)).catch(err => this.emit("error", err));
}
#lockWrite = false;
close() {
this.#lockWrite = true;
this.push(null);
}
entry(filename: string, size: number, mtime?: Date) {
if (this.#lockWrite) throw new Error("Write locked");
this.#lockWrite = true;
this.push(createHead(filename, {size, mtime}), "binary");
export interface arStream extends stream.Readable {
close(): void;
entry(...args: Parameters<typeof createHead>): stream.Writable;
addLocalFile(filePath: string, filename?: string): Promise<void>;
getEntrys(): {[fileName: string]: fileInfo};
}

/**
* Create an ar archive and return it as a readable stream
*
* @param onRedable - Callback invoked with the ar stream and a completion callback
* @returns
*/
export function createArStream(onRedable?: (ar: arStream, callback: (err?: any) => void) => void) {
const entrys = new Map<string, fileInfo>();
let lockWrite = false;
return new (class arStream extends stream.Readable {
constructor() {
super({autoDestroy: true, read() {}})
this.push(Buffer.from([0x21, 0x3C, 0x61, 0x72, 0x63, 0x68, 0x3E, 0x0A]), "binary");
if (typeof onRedable === "function") Promise.resolve().then(() => onRedable(this, (err?: any) => this.#callback(err))).catch(err => this.#callback(err));
}
#callback(err?: any) {
lockWrite = true;
if (err) this.emit("error", err);
this.push(null);
};
close() {this.#callback();}
getEntrys() {
return Array.from(entrys.keys()).reduce<{[fileName: string]: fileInfo}>((acc, key) => {
acc[key] = entrys.get(key);
return acc;
}, {});
}
entry(filename, info) {
if (entrys.has(filename)) throw new Error("File already exists in ar file!");
else if (lockWrite) throw new Error("Write locked");
lockWrite = true;
this.push(createHead(filename, info), "binary");
entrys.set(filename, {...info});
return new stream.Writable({
autoDestroy: true,
decodeStrings: false,
emitClose: true,
write: (chunk, encoding, callback) => {
this.push(chunk, encoding);
callback();
},
final: (callback) => {
lockWrite = false;
if (entrys.get(filename).size & 1) this.push("\n");
callback();
},
destroy: (error, callback) => {
if (!!error) this.emit("error", error);
callback(error);
},
});
}
async addLocalFile(filePath: string, filename = path.basename(filePath)) {
if (!(await extendsFS.isFile(filePath))) throw new Error("path is not file!");
const stats = await fs.stat(filePath);
await finished(oldFs.createReadStream(filePath).pipe(this.entry(filename, {
size: stats.size,
mtime: stats.mtime,
owner: stats.uid || 0,
group: stats.gid || 0
})));
}
})();
}
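
A sketch of the new streaming API, assuming the same package name and three hypothetical input files; the second argument handed to onRedable ends the archive or forwards an error:

import { createWriteStream } from "node:fs";
import { createArStream } from "@sirherobrine23/ar"; // assumed package name

const ar = createArStream(async (archive, done) => {
  try {
    // Each call stats the local file and streams it as one ar entry.
    await archive.addLocalFile("./debian-binary"); // hypothetical inputs
    await archive.addLocalFile("./control.tar.gz");
    await archive.addLocalFile("./data.tar.gz");
    done();    // ends the archive (pushes null)
  } catch (err) {
    done(err); // surfaces the error on the readable side
  }
});
ar.pipe(createWriteStream("package.deb"));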

/**
* Create an ar file locally on disk
*
* @param filePath - File path
* @returns
*/
export async function createFile(filePath: string) {
await fs.writeFile(filePath, Buffer.from([0x21, 0x3C, 0x61, 0x72, 0x63, 0x68, 0x3E, 0x0A]), "binary");
let lock = false;
const entrys = new Map<string, {mtime?: Date|string, size: number, mode?: string}>();
async function entry(filename: string, info: {mtime?: Date|string, size: number, mode?: string}) {
if (lock) throw new Error("Wait before entry end!");
if (entrys.has((filename = path.basename(filename)))) throw new Error("File already exists");
entrys.set(filename, {...info});
lock = true;
await fs.appendFile(filePath, createHead(filename, info));
const src = oldFs.createWriteStream(filePath, {flags: "a+"});
return new stream.Writable({
autoDestroy: true,
decodeStrings: false,
emitClose: true,
write: (chunk, encoding, callback) => {
this.push(chunk, encoding);
callback();
write(chunk, encoding, callback) {
return src.write(chunk, encoding, callback);
},
final: (callback) => {
this.#lockWrite = false;
if (size & 1) this.push("\n");
async final(callback) {
if (entrys.get(filename).size & 1) await fs.appendFile(filePath, "\n");
lock = false;
callback();
},
destroy: (error, callback) => {
if (!!error) this.emit("error", error);
callback(error);
src.close(callback);
},
});
}
async addLocalFile(filePath: string, filename = path.basename(filePath)) {
if (!(await extendsFS.isFile(filePath))) throw new Error("path is not file!");
const stats = await fs.stat(filePath);
await finished(oldFs.createReadStream(filePath).pipe(this.entry(filename, stats.size, stats.mtime)));
}

return {
entry,
getEntrys() {
return Array.from(entrys.keys()).reduce<{[filename: string]: ReturnType<typeof entrys.get>}>((acc, key) => {
acc[key] = entrys.get(key);
return acc;
}, {});
}
};
}
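
A sketch of the on-disk variant: entry() is awaited to append the header, and the returned Writable has to finish before the next entry is added (file names are hypothetical):

import { createReadStream, promises as fs } from "node:fs";
import { finished } from "node:stream/promises";
import { createFile } from "@sirherobrine23/ar"; // assumed package name

const ar = await createFile("out.a");
const stats = await fs.stat("hello.txt");
const entry = await ar.entry("hello.txt", { size: stats.size, mtime: stats.mtime });
await finished(createReadStream("hello.txt").pipe(entry)); // wait before the next entry
console.log(ar.getEntrys()); // { "hello.txt": { size, mtime } }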

export function createStream(...args: ConstructorParameters<typeof arStream>) {
return new arStream(...args);
/**
* Parse an ar file and return an array of entry headers, each with a function that opens a read stream for that entry
*
* @param filePath - File path
*/
export async function parseFile(filePath: string) {
if (!((await extendsFS.readFile(filePath, 0, 8)).toString("ascii").startsWith("!<arch>\n"))) throw new Error("Invalid ar file, invalid magic head!");
const heads: {head: arHeader, start: number, end: number}[] = [];

let oldBuffer: Buffer, fileStreamSize: number = 0, offset = 8;
await finished(oldFs.createReadStream(filePath, {start: 8}).pipe(new stream.Writable({
write(chunk, _encoding, callback) {
if (oldBuffer) chunk = Buffer.concat([oldBuffer, chunk]); oldBuffer = undefined;
offset += Buffer.byteLength(chunk);
if (fileStreamSize >= 1) {
const fixedChunk = chunk.subarray(0, fileStreamSize);
fileStreamSize -= fixedChunk.byteLength;
chunk = chunk.subarray(fixedChunk.length);
if (fileStreamSize <= 0) fileStreamSize = 0;
if (chunk.length <= 0) return callback();
}
if (chunk.length >= 60) {
for (let chunkByte = 0; chunkByte < chunk.length; chunkByte++) {
const lastByteHead = chunkByte;
const fistCharByte = lastByteHead-60;
if (fistCharByte < 0) continue;
const head = chunk.subarray(fistCharByte, lastByteHead);
const name = head.subarray(0, 16).toString("ascii").trim();
const time = new Date(parseInt(head.subarray(16, 28).toString("ascii").trim()) * 1000);
const owner = parseInt(head.subarray(28, 34).toString("ascii").trim());
const group = parseInt(head.subarray(34, 40).toString("ascii").trim());
const mode = parseInt(head.subarray(40, 48).toString("ascii").trim());
const size = parseInt(head.subarray(48, 58).toString("ascii").trim());
if ((!name)||(time.toString() === "Invalid Date")||(isNaN(owner))||(isNaN(group))||(isNaN(mode))||(isNaN(size))) continue;
if (head.subarray(58, 60).toString("ascii") !== "`\n") continue;
chunk = chunk.subarray(lastByteHead);
heads.push({
start: offset-chunk.length,
end: (offset-chunk.length-1)+size,
head: {
name,
time,
owner,
group,
mode,
size
}
});
const fileSize = chunk.subarray(0, size);
fileStreamSize = size - fileSize.byteLength;
chunk = chunk.subarray(fileSize.length);
chunkByte = 0;
}
}

// Get more buffer data
if (chunk.length > 0) oldBuffer = chunk;
callback();
},
})));
return heads.map(info => ({
head: info.head,
startOn: info.start,
endOf: info.end,
getFile() {
return oldFs.createReadStream(filePath, {
start: info.start,
end: info.end
});
}
}));
}
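
A sketch of listing an archive with parseFile and extracting a single member; the member name is only an example:

import { createWriteStream } from "node:fs";
import { finished } from "node:stream/promises";
import { parseFile } from "@sirherobrine23/ar"; // assumed package name

const members = await parseFile("example.deb");
for (const { head } of members) console.log(head.name, head.size);

const control = members.find(({ head }) => head.name === "control.tar.gz");
if (control) await finished(control.getFile().pipe(createWriteStream("control.tar.gz")));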
