mirror of
https://github.com/tvytlx/ai-agent-deep-dive.git
synced 2026-04-05 00:24:50 +08:00
Add extracted source directory and README navigation
This commit is contained in:
115
extracted-source/node_modules/@anthropic-ai/mcpb/dist/node/files.js
generated
vendored
Normal file
115
extracted-source/node_modules/@anthropic-ai/mcpb/dist/node/files.js
generated
vendored
Normal file
@@ -0,0 +1,115 @@
|
||||
import { existsSync, readdirSync, readFileSync, statSync } from "fs";
|
||||
import ignore from "ignore";
|
||||
import { join, relative, sep } from "path";
|
||||
// Files/patterns to exclude from the package
// (gitignore-style patterns, consumed by the `ignore` matcher below)
export const EXCLUDE_PATTERNS = [
    // OS metadata files
    ".DS_Store",
    "Thumbs.db",
    // VCS and ignore-control files
    ".gitignore",
    ".git",
    ".mcpbignore",
    // Logs and environment files
    "*.log",
    ".env*",
    // Package-manager configuration
    ".npm",
    ".npmrc",
    ".yarnrc",
    ".yarn",
    // Linter / formatter / tooling configuration
    ".eslintrc",
    ".editorconfig",
    ".prettierrc",
    ".prettierignore",
    ".eslintignore",
    ".nycrc",
    ".babelrc",
    ".pnp.*",
    // node_modules internals not needed at runtime
    "node_modules/.cache",
    "node_modules/.bin",
    // Source maps, local env files, and package-manager debug logs
    "*.map",
    ".env.local",
    ".env.*.local",
    "npm-debug.log*",
    "yarn-debug.log*",
    "yarn-error.log*",
    // Lockfiles
    "package-lock.json",
    "yarn.lock",
    // Previously built bundles and TypeScript build artifacts
    "*.mcpb",
    "*.d.ts",
    "*.tsbuildinfo",
    "tsconfig.json",
];
|
||||
/**
 * Read and parse .mcpbignore file patterns
 *
 * Returns one pattern per non-empty, non-comment line. A missing file
 * yields an empty list; an unreadable file yields an empty list and a
 * console warning.
 *
 * @param baseDir Directory expected to contain the .mcpbignore file
 * @returns Array of trimmed ignore patterns
 */
export function readMcpbIgnorePatterns(baseDir) {
    const ignoreFile = join(baseDir, ".mcpbignore");
    if (!existsSync(ignoreFile)) {
        return [];
    }
    try {
        const rawLines = readFileSync(ignoreFile, "utf-8").split(/\r?\n/);
        const patterns = [];
        for (const rawLine of rawLines) {
            const trimmed = rawLine.trim();
            // Skip blank lines and '#' comment lines
            if (trimmed.length > 0 && !trimmed.startsWith("#")) {
                patterns.push(trimmed);
            }
        }
        return patterns;
    }
    catch (error) {
        const reason = error instanceof Error ? error.message : "Unknown error";
        console.warn(`Warning: Could not read .mcpbignore file: ${reason}`);
        return [];
    }
}
|
||||
// Create an ignore matcher seeded with the built-in exclude list plus any
// caller-supplied patterns.
function buildIgnoreChecker(additionalPatterns) {
    const checker = ignore();
    checker.add(EXCLUDE_PATTERNS);
    checker.add(additionalPatterns);
    return checker;
}
|
||||
/**
 * Used for testing, calls the same methods as the other ignore checks
 *
 * @param filePath Relative path to test against the exclude rules
 * @param additionalPatterns Extra ignore patterns applied on top of EXCLUDE_PATTERNS
 * @returns true when the path would be excluded from the package
 */
export function shouldExclude(filePath, additionalPatterns = []) {
    const checker = buildIgnoreChecker(additionalPatterns);
    return checker.ignores(filePath);
}
|
||||
/**
 * Recursively collect every file under dirPath that is not excluded by
 * EXCLUDE_PATTERNS or the caller-supplied patterns.
 *
 * @param dirPath Directory to walk
 * @param baseDir Root used to compute archive-relative paths (defaults to dirPath)
 * @param fileList Accumulator mapping zip paths to file contents
 * @param additionalPatterns Extra ignore patterns (e.g. .mcpbignore entries)
 * @returns fileList keyed by forward-slash relative path, valued by Buffer contents
 */
export function getAllFiles(dirPath, baseDir = dirPath, fileList = {}, additionalPatterns = []) {
    // Build the matcher once: the pattern set is invariant across the whole
    // walk, so the original per-directory rebuild was wasted work.
    const ignoreChecker = buildIgnoreChecker(additionalPatterns);
    const walk = (currentDir) => {
        for (const entry of readdirSync(currentDir)) {
            const filePath = join(currentDir, entry);
            const relativePath = relative(baseDir, filePath);
            if (ignoreChecker.ignores(relativePath)) {
                continue;
            }
            const stat = statSync(filePath);
            if (stat.isDirectory()) {
                walk(filePath);
            }
            else {
                // Use forward slashes in zip file paths
                const zipPath = relativePath.split(sep).join("/");
                fileList[zipPath] = readFileSync(filePath);
            }
        }
    };
    walk(dirPath);
    return fileList;
}
|
||||
/**
 * Like getAllFiles, but also counts entries skipped by the ignore rules and
 * records each file's permission mode (so executable bits can be preserved
 * when packing).
 *
 * @param dirPath Directory to walk
 * @param baseDir Root used to compute archive-relative paths (defaults to dirPath)
 * @param fileList Accumulator mapping zip paths to { data, mode }
 * @param additionalPatterns Extra ignore patterns
 * @param ignoredCount Starting count of ignored entries (used by recursion)
 * @returns { files, ignoredCount } where files maps zip paths to
 *          { data: Buffer, mode: number }
 */
export function getAllFilesWithCount(dirPath, baseDir = dirPath, fileList = {}, additionalPatterns = [], ignoredCount = 0) {
    // Build the matcher once instead of once per directory level.
    const ignoreChecker = buildIgnoreChecker(additionalPatterns);
    const walk = (currentDir) => {
        for (const entry of readdirSync(currentDir)) {
            const filePath = join(currentDir, entry);
            const relativePath = relative(baseDir, filePath);
            if (ignoreChecker.ignores(relativePath)) {
                ignoredCount++;
                continue;
            }
            const stat = statSync(filePath);
            if (stat.isDirectory()) {
                walk(filePath);
            }
            else {
                // Use forward slashes in zip file paths
                const zipPath = relativePath.split(sep).join("/");
                fileList[zipPath] = {
                    data: readFileSync(filePath),
                    mode: stat.mode,
                };
            }
        }
    };
    walk(dirPath);
    return { files: fileList, ignoredCount };
}
|
||||
333
extracted-source/node_modules/@anthropic-ai/mcpb/dist/node/sign.js
generated
vendored
Normal file
333
extracted-source/node_modules/@anthropic-ai/mcpb/dist/node/sign.js
generated
vendored
Normal file
@@ -0,0 +1,333 @@
|
||||
import { execFile } from "child_process";
|
||||
import { readFileSync, writeFileSync } from "fs";
|
||||
import { mkdtemp, rm, writeFile } from "fs/promises";
|
||||
import forge from "node-forge";
|
||||
import { tmpdir } from "os";
|
||||
import { join } from "path";
|
||||
import { promisify } from "util";
|
||||
// Signature block markers
// A signed MCPB file is the original archive followed by:
//   SIGNATURE_HEADER | uint32-LE signature length | PKCS#7 DER | SIGNATURE_FOOTER
// (see createSignatureBlock / extractSignatureBlock)
const SIGNATURE_HEADER = "MCPB_SIG_V1";
const SIGNATURE_FOOTER = "MCPB_SIG_END";
// Promise-returning wrapper around child_process.execFile
const execFileAsync = promisify(execFile);
|
||||
/**
 * Signs a MCPB file with the given certificate and private key using PKCS#7
 *
 * The archive is rewritten in place: the original bytes are kept and a
 * framed signature trailer (header, length, detached PKCS#7 DER, footer)
 * is appended.
 *
 * @param mcpbPath Path to the MCPB file to sign
 * @param certPath Path to the certificate file (PEM format)
 * @param keyPath Path to the private key file (PEM format)
 * @param intermediates Optional array of intermediate certificate paths
 */
export function signMcpbFile(mcpbPath, certPath, keyPath, intermediates) {
    // Load everything up front: archive bytes, then the signing material.
    const archiveBytes = readFileSync(mcpbPath);
    const certificatePem = readFileSync(certPath, "utf-8");
    const privateKeyPem = readFileSync(keyPath, "utf-8");
    const intermediatePems = intermediates?.map((path) => readFileSync(path, "utf-8"));
    // Assemble the PKCS#7 SignedData structure over the archive content.
    const signedData = forge.pkcs7.createSignedData();
    signedData.content = forge.util.createBuffer(archiveBytes);
    const signingCert = forge.pki.certificateFromPem(certificatePem);
    const privateKey = forge.pki.privateKeyFromPem(privateKeyPem);
    signedData.addCertificate(signingCert);
    // Include the intermediate chain so verifiers can build a full path.
    for (const pem of intermediatePems ?? []) {
        signedData.addCertificate(forge.pki.certificateFromPem(pem));
    }
    signedData.addSigner({
        key: privateKey,
        certificate: signingCert,
        digestAlgorithm: forge.pki.oids.sha256,
        authenticatedAttributes: [
            {
                type: forge.pki.oids.contentType,
                value: forge.pki.oids.data,
            },
            // Value auto-populated by forge at signing time
            {
                type: forge.pki.oids.messageDigest,
            },
            // Value auto-populated with the current time
            {
                type: forge.pki.oids.signingTime,
            },
        ],
    });
    // Detached: the signature does not embed the (potentially large) content.
    signedData.sign({ detached: true });
    // Serialize to DER and append as a framed trailer block.
    const derBytes = forge.asn1.toDer(signedData.toAsn1());
    const pkcs7Signature = Buffer.from(derBytes.getBytes(), "binary");
    const signatureBlock = createSignatureBlock(pkcs7Signature);
    writeFileSync(mcpbPath, Buffer.concat([archiveBytes, signatureBlock]));
}
|
||||
/**
 * Verifies a signed MCPB file using OS certificate store
 *
 * Three stages: (1) locate and parse the appended PKCS#7 signature block,
 * (2) check the cryptographic signature and that the content digest matches
 * the signed messageDigest attribute, (3) validate the certificate chain
 * against the OS trust store. Every validation failure is reported as
 * { status: "unsigned" }; only unexpected exceptions escape (wrapped in an
 * Error by the outer catch).
 *
 * @param mcpbPath Path to the signed MCPB file
 * @returns Signature information including verification status
 */
export async function verifyMcpbFile(mcpbPath) {
    try {
        const fileContent = readFileSync(mcpbPath);
        // Find and extract signature block
        const { originalContent, pkcs7Signature } = extractSignatureBlock(fileContent);
        if (!pkcs7Signature) {
            return { status: "unsigned" };
        }
        // Parse PKCS#7 signature
        const asn1 = forge.asn1.fromDer(pkcs7Signature.toString("binary"));
        const p7Message = forge.pkcs7.messageFromAsn1(asn1);
        // Verify it's signed data and cast to correct type
        if (!("type" in p7Message) ||
            p7Message.type !== forge.pki.oids.signedData) {
            return { status: "unsigned" };
        }
        // Now we know it's PkcsSignedData. The types are incorrect, so we'll
        // fix them there
        const p7 = p7Message;
        // Extract certificates from PKCS#7
        const certificates = p7.certificates || [];
        if (certificates.length === 0) {
            return { status: "unsigned" };
        }
        // Get the signing certificate (first one)
        const signingCert = certificates[0];
        // Verify PKCS#7 signature
        const contentBuf = forge.util.createBuffer(originalContent);
        try {
            p7.verify({ authenticatedAttributes: true });
            // Also verify the content matches: recompute the SHA-256 of the
            // archive bytes and compare it with the signed messageDigest
            // attribute (the signature is detached, so forge alone cannot
            // tie it to the content).
            const signerInfos = p7.signerInfos;
            const signerInfo = signerInfos?.[0];
            if (signerInfo) {
                const md = forge.md.sha256.create();
                md.update(contentBuf.getBytes());
                const digest = md.digest().getBytes();
                // Find the message digest attribute
                let messageDigest = null;
                for (const attr of signerInfo.authenticatedAttributes) {
                    if (attr.type === forge.pki.oids.messageDigest) {
                        messageDigest = attr.value;
                        break;
                    }
                }
                // Both sides are forge binary strings; strict compare.
                if (!messageDigest || messageDigest !== digest) {
                    return { status: "unsigned" };
                }
            }
        }
        catch (error) {
            // Signature verification threw: report unsigned, not an error
            return { status: "unsigned" };
        }
        // Convert forge certificate to PEM for OS verification
        const certPem = forge.pki.certificateToPem(signingCert);
        const intermediatePems = certificates
            .slice(1)
            .map((cert) => Buffer.from(forge.pki.certificateToPem(cert)));
        // Verify certificate chain against OS trust store
        const chainValid = await verifyCertificateChain(Buffer.from(certPem), intermediatePems);
        if (!chainValid) {
            // Signature is valid but certificate is not trusted
            return { status: "unsigned" };
        }
        // Extract certificate info. Self-signed is detected by comparing
        // issuer CN with subject CN — NOTE(review): a CA could share CNs
        // with a leaf; presumably acceptable here, verify if this matters.
        const isSelfSigned = signingCert.issuer.getField("CN")?.value ===
            signingCert.subject.getField("CN")?.value;
        return {
            status: isSelfSigned ? "self-signed" : "signed",
            publisher: signingCert.subject.getField("CN")?.value || "Unknown",
            issuer: signingCert.issuer.getField("CN")?.value || "Unknown",
            valid_from: signingCert.validity.notBefore.toISOString(),
            valid_to: signingCert.validity.notAfter.toISOString(),
            // SHA-256 over the certificate's DER encoding, hex-encoded
            fingerprint: forge.md.sha256
                .create()
                .update(forge.asn1.toDer(forge.pki.certificateToAsn1(signingCert)).getBytes())
                .digest()
                .toHex(),
        };
    }
    catch (error) {
        throw new Error(`Failed to verify MCPB file: ${error}`);
    }
}
|
||||
/**
 * Creates a signature block buffer with PKCS#7 signature
 *
 * Layout: SIGNATURE_HEADER, 4-byte little-endian signature length,
 * raw PKCS#7 DER bytes, SIGNATURE_FOOTER.
 */
function createSignatureBlock(pkcs7Signature) {
    // 4-byte little-endian length prefix for the DER payload
    const lengthPrefix = Buffer.alloc(4);
    lengthPrefix.writeUInt32LE(pkcs7Signature.length, 0);
    return Buffer.concat([
        Buffer.from(SIGNATURE_HEADER, "utf-8"),
        lengthPrefix,
        pkcs7Signature,
        Buffer.from(SIGNATURE_FOOTER, "utf-8"),
    ]);
}
|
||||
/**
 * Extracts the signature block from a signed MCPB file
 *
 * Scans for the trailer written by createSignatureBlock:
 *   SIGNATURE_HEADER | uint32-LE length | PKCS#7 DER | SIGNATURE_FOOTER
 *
 * @param fileContent Full contents of a (possibly signed) MCPB file
 * @returns { originalContent } for unsigned or malformed files, or
 *          { originalContent, pkcs7Signature } when a block is found
 */
export function extractSignatureBlock(fileContent) {
    // Look for signature footer at the end
    const footerBytes = Buffer.from(SIGNATURE_FOOTER, "utf-8");
    const footerIndex = fileContent.lastIndexOf(footerBytes);
    if (footerIndex === -1) {
        return { originalContent: fileContent };
    }
    // Find the last header that starts before the footer. Buffer.lastIndexOf
    // with a byteOffset replaces the original O(n*m) byte-by-byte backwards
    // scan. Guard footerIndex === 0: a negative byteOffset would make
    // lastIndexOf search from the end of the buffer instead.
    const headerBytes = Buffer.from(SIGNATURE_HEADER, "utf-8");
    const headerIndex = footerIndex > 0
        ? fileContent.lastIndexOf(headerBytes, footerIndex - 1)
        : -1;
    if (headerIndex === -1) {
        return { originalContent: fileContent };
    }
    // Everything before the header is the original archive content
    const originalContent = fileContent.slice(0, headerIndex);
    // Parse the trailer that follows the header
    let offset = headerIndex + headerBytes.length;
    try {
        // Read PKCS#7 signature length (uint32, little-endian)
        const sigLength = fileContent.readUInt32LE(offset);
        offset += 4;
        // Read PKCS#7 signature
        const pkcs7Signature = fileContent.slice(offset, offset + sigLength);
        return {
            originalContent,
            pkcs7Signature,
        };
    }
    catch {
        // Truncated/corrupt trailer: treat the file as unsigned
        return { originalContent: fileContent };
    }
}
|
||||
/**
 * Verifies certificate chain against OS trust store
 *
 * Writes the leaf plus intermediate PEMs to a temp chain file, then shells
 * out to a platform tool: `security verify-cert` on macOS, a PowerShell
 * X509Chain build on Windows, `openssl verify` elsewhere. Any failure —
 * including a missing tool or non-zero exit — yields false. The temp dir
 * is always removed.
 *
 * @param certificate Leaf certificate PEM (Buffer)
 * @param intermediates Optional intermediate certificate PEM Buffers
 * @returns true only when the chain validates for the code-signing purpose
 */
export async function verifyCertificateChain(certificate, intermediates) {
    let tempDir = null;
    try {
        tempDir = await mkdtemp(join(tmpdir(), "mcpb-verify-"));
        const certChainPath = join(tempDir, "chain.pem");
        // Concatenate leaf + intermediates into one PEM chain file
        const certChain = [certificate, ...(intermediates || [])].join("\n");
        await writeFile(certChainPath, certChain);
        // Platform-specific verification
        if (process.platform === "darwin") {
            try {
                // macOS Security framework; "-p codeSign" selects the
                // code-signing verification policy
                await execFileAsync("security", [
                    "verify-cert",
                    "-c",
                    certChainPath,
                    "-p",
                    "codeSign",
                ]);
                return true;
            }
            catch (error) {
                return false;
            }
        }
        else if (process.platform === "win32") {
            // Build and validate an X509Chain in PowerShell; prints 'Valid'
            // on success, exits non-zero otherwise.
            const psCommand = `
            $ErrorActionPreference = 'Stop'
            $certCollection = New-Object System.Security.Cryptography.X509Certificates.X509Certificate2Collection
            $certCollection.Import('${certChainPath}')

            if ($certCollection.Count -eq 0) {
                Write-Error 'No certificates found'
                exit 1
            }

            $leafCert = $certCollection[0]
            $chain = New-Object System.Security.Cryptography.X509Certificates.X509Chain

            # Enable revocation checking
            $chain.ChainPolicy.RevocationMode = 'Online'
            $chain.ChainPolicy.RevocationFlag = 'EntireChain'
            $chain.ChainPolicy.UrlRetrievalTimeout = New-TimeSpan -Seconds 30

            # Add code signing application policy
            $codeSignOid = New-Object System.Security.Cryptography.Oid '1.3.6.1.5.5.7.3.3'
            $chain.ChainPolicy.ApplicationPolicy.Add($codeSignOid)

            # Add intermediate certificates to extra store
            for ($i = 1; $i -lt $certCollection.Count; $i++) {
                [void]$chain.ChainPolicy.ExtraStore.Add($certCollection[$i])
            }

            # Build and validate chain
            $result = $chain.Build($leafCert)

            if ($result) {
                'Valid'
            } else {
                $chain.ChainStatus | ForEach-Object {
                    Write-Error "$($_.Status): $($_.StatusInformation)"
                }
                exit 1
            }
            `.trim();
            const { stdout } = await execFileAsync("powershell.exe", [
                "-NoProfile",
                "-NonInteractive",
                "-Command",
                psCommand,
            ]);
            return stdout.includes("Valid");
        }
        else {
            // Linux: Use openssl
            try {
                // NOTE(review): /etc/ssl/certs is the Debian/Ubuntu-style CA
                // path — presumably fine for target distros, confirm for
                // RHEL-family systems.
                await execFileAsync("openssl", [
                    "verify",
                    "-purpose",
                    "codesigning",
                    "-CApath",
                    "/etc/ssl/certs",
                    certChainPath,
                ]);
                return true;
            }
            catch (error) {
                return false;
            }
        }
    }
    catch (error) {
        // Any setup or tool failure counts as an untrusted chain
        return false;
    }
    finally {
        if (tempDir) {
            try {
                await rm(tempDir, { recursive: true, force: true });
            }
            catch {
                // Ignore cleanup errors
            }
        }
    }
}
|
||||
/**
 * Removes signature from a MCPB file
 *
 * Rewrites the file in place with any trailing signature block stripped;
 * a file with no signature block is rewritten with identical content.
 */
export function unsignMcpbFile(mcpbPath) {
    const { originalContent } = extractSignatureBlock(readFileSync(mcpbPath));
    writeFileSync(mcpbPath, originalContent);
}
|
||||
124
extracted-source/node_modules/@anthropic-ai/mcpb/dist/node/validate.js
generated
vendored
Normal file
124
extracted-source/node_modules/@anthropic-ai/mcpb/dist/node/validate.js
generated
vendored
Normal file
@@ -0,0 +1,124 @@
|
||||
import { existsSync, readFileSync, statSync } from "fs";
|
||||
import * as fs from "fs/promises";
|
||||
import { DestroyerOfModules } from "galactus";
|
||||
import * as os from "os";
|
||||
import { join, resolve } from "path";
|
||||
import prettyBytes from "pretty-bytes";
|
||||
import { unpackExtension } from "../cli/unpack.js";
|
||||
import { McpbManifestSchema } from "../schemas.js";
|
||||
import { McpbManifestSchema as LooseMcpbManifestSchema } from "../schemas-loose.js";
|
||||
/**
 * Validate a manifest.json (or a directory containing one) against the
 * strict MCPB manifest schema, logging the outcome to the console.
 *
 * @param inputPath Path to a manifest.json file or a directory holding one
 * @returns true when the manifest parses and passes schema validation
 */
export function validateManifest(inputPath) {
    try {
        const resolvedPath = resolve(inputPath);
        // If input is a directory, look for manifest.json inside it
        const isDir = existsSync(resolvedPath) && statSync(resolvedPath).isDirectory();
        const manifestPath = isDir ? join(resolvedPath, "manifest.json") : resolvedPath;
        const manifestData = JSON.parse(readFileSync(manifestPath, "utf-8"));
        const result = McpbManifestSchema.safeParse(manifestData);
        if (result.success) {
            console.log("Manifest schema validation passes!");
            return true;
        }
        // Report each schema issue with its dotted path, when present
        console.log("ERROR: Manifest validation failed:\n");
        for (const issue of result.error.issues) {
            const path = issue.path.join(".");
            console.log(` - ${path ? `${path}: ` : ""}${issue.message}`);
        }
        return false;
    }
    catch (error) {
        if (!(error instanceof Error)) {
            console.error("ERROR: Unknown error occurred");
            return false;
        }
        if (error.message.includes("ENOENT")) {
            console.error(`ERROR: File not found: ${inputPath}`);
            // Distinguish "directory exists but has no manifest.json"
            const retryPath = resolve(inputPath);
            if (existsSync(retryPath) && statSync(retryPath).isDirectory()) {
                console.error(` (No manifest.json found in directory)`);
            }
        }
        else if (error.message.includes("JSON")) {
            console.error(`ERROR: Invalid JSON in manifest file: ${error.message}`);
        }
        else {
            console.error(`ERROR: Error reading manifest: ${error.message}`);
        }
        return false;
    }
}
|
||||
/**
 * Rebuild an existing .mcpb archive in a normalized, minimal form.
 *
 * Steps: copy the archive into a temp dir, unpack it, rewrite manifest.json
 * through the loose schema, prune development dependencies from node_modules
 * (via galactus' DestroyerOfModules), repack over the original file, and
 * report before/after sizes. The temp dir is always removed.
 *
 * @param inputPath Path to the .mcpb file to clean (overwritten in place)
 * @throws When the manifest fails even the loose schema, or when pruning
 *         fails for a reason other than already-missing modules
 */
export async function cleanMcpb(inputPath) {
    const tmpDir = await fs.mkdtemp(resolve(os.tmpdir(), "mcpb-clean-"));
    const mcpbPath = resolve(tmpDir, "in.mcpb");
    const unpackPath = resolve(tmpDir, "out");
    console.log(" -- Cleaning MCPB...");
    try {
        // Work on a copy so the original is untouched until the final repack
        await fs.copyFile(inputPath, mcpbPath);
        console.log(" -- Unpacking MCPB...");
        await unpackExtension({ mcpbPath, silent: true, outputDir: unpackPath });
        // Re-serialize the manifest through the loose schema
        const manifestPath = resolve(unpackPath, "manifest.json");
        const originalManifest = await fs.readFile(manifestPath, "utf-8");
        const manifestData = JSON.parse(originalManifest);
        const result = LooseMcpbManifestSchema.safeParse(manifestData);
        if (!result.success) {
            throw new Error(`Unrecoverable manifest issues, please run "mcpb validate"`);
        }
        await fs.writeFile(manifestPath, JSON.stringify(result.data, null, 2));
        // Re-read to detect whether normalization actually changed anything
        if (originalManifest.trim() !==
            (await fs.readFile(manifestPath, "utf8")).trim()) {
            console.log(" -- Update manifest to be valid per MCPB schema");
        }
        else {
            console.log(" -- Manifest already valid per MCPB schema");
        }
        const nodeModulesPath = resolve(unpackPath, "node_modules");
        if (existsSync(nodeModulesPath)) {
            console.log(" -- node_modules found, deleting development dependencies");
            const destroyer = new DestroyerOfModules({
                rootDirectory: unpackPath,
            });
            try {
                await destroyer.destroy();
            }
            catch (error) {
                // If modules have already been deleted in a previous clean, the walker
                // will fail when it can't find required dependencies. This is expected
                // and safe to ignore.
                if (error instanceof Error &&
                    error.message.includes("Failed to locate module")) {
                    console.log(" -- Some modules already removed, skipping remaining cleanup");
                }
                else {
                    throw error;
                }
            }
            // NOTE(review): logged even when pruning was skipped just above
            console.log(" -- Removed development dependencies from node_modules");
        }
        else {
            console.log(" -- No node_modules, not pruning");
        }
        // Measure the original before overwriting it with the repacked bundle
        const before = await fs.stat(inputPath);
        // Lazy import — presumably avoids a circular dependency with pack.js;
        // verify before changing to a static import.
        const { packExtension } = await import("../cli/pack.js");
        await packExtension({
            extensionPath: unpackPath,
            outputPath: inputPath,
            silent: true,
        });
        const after = await fs.stat(inputPath);
        console.log("\nClean Complete:");
        console.log("Before:", prettyBytes(before.size));
        console.log("After:", prettyBytes(after.size));
    }
    finally {
        // Always remove the scratch directory, even on failure
        await fs.rm(tmpDir, {
            recursive: true,
            force: true,
        });
    }
}
|
||||
Reference in New Issue
Block a user