mirror of
https://github.com/tvytlx/ai-agent-deep-dive.git
synced 2026-04-05 08:34:47 +08:00
Add extracted source directory and README navigation
This commit is contained in:
705
extracted-source/node_modules/@anthropic-ai/mcpb/dist/cli/init.js
generated
vendored
Normal file
705
extracted-source/node_modules/@anthropic-ai/mcpb/dist/cli/init.js
generated
vendored
Normal file
@@ -0,0 +1,705 @@
|
||||
import { confirm, input, select } from "@inquirer/prompts";
|
||||
import { existsSync, readFileSync, writeFileSync } from "fs";
|
||||
import { basename, join, resolve } from "path";
|
||||
import { CURRENT_MANIFEST_VERSION } from "../schemas.js";
|
||||
/**
 * Read and parse the package.json in the given directory.
 *
 * Best-effort by design: a missing or syntactically invalid package.json is
 * not an error here, because all callers only use it to seed defaults.
 *
 * @param {string} dirPath - Directory expected to contain a package.json.
 * @returns {object} Parsed package data, or an empty object when the file is
 *   missing or cannot be parsed.
 */
export function readPackageJson(dirPath) {
    const packageJsonPath = join(dirPath, "package.json");
    if (existsSync(packageJsonPath)) {
        try {
            return JSON.parse(readFileSync(packageJsonPath, "utf-8"));
        }
        catch {
            // Ignore package.json parsing errors; defaults are filled in later.
        }
    }
    return {};
}
|
||||
/**
 * Derive a default author name from package.json data.
 * Handles both the string form (`"author": "Jane"`) and the object form
 * (`"author": {"name": "Jane", ...}`); returns "" when neither is present.
 */
export function getDefaultAuthorName(packageData) {
    const { author } = packageData;
    return typeof author === "string" ? author : author?.name || "";
}
|
||||
/**
 * Derive a default author email from package.json data.
 * Only the object author form carries an email; the string form yields "".
 */
export function getDefaultAuthorEmail(packageData) {
    const { author } = packageData;
    if (typeof author !== "object") {
        return "";
    }
    // `typeof null === "object"` is handled by the optional chain below.
    return author?.email || "";
}
|
||||
/**
 * Derive a default author URL from package.json data.
 * Only the object author form carries a URL; the string form yields "".
 */
export function getDefaultAuthorUrl(packageData) {
    const { author } = packageData;
    if (typeof author !== "object") {
        return "";
    }
    // `typeof null === "object"` is handled by the optional chain below.
    return author?.url || "";
}
|
||||
/**
 * Derive a default repository URL from package.json data.
 * Supports both the shorthand string form and `{type, url}` objects.
 */
export function getDefaultRepositoryUrl(packageData) {
    const { repository } = packageData;
    return typeof repository === "string" ? repository : repository?.url || "";
}
|
||||
/**
 * Build the full set of default basic-info fields used in non-interactive
 * mode, seeded from package.json with sensible fallbacks.
 *
 * @param {object} packageData - Parsed package.json data (may be empty).
 * @param {string} resolvedPath - Extension directory; its basename is the
 *   fallback extension name.
 * @returns {{name: string, authorName: string, displayName: string, version: string, description: string}}
 */
export function getDefaultBasicInfo(packageData, resolvedPath) {
    const name = packageData.name || basename(resolvedPath);
    return {
        name,
        authorName: getDefaultAuthorName(packageData) || "Unknown Author",
        // The display name defaults to the package name itself.
        displayName: name,
        version: packageData.version || "1.0.0",
        description: packageData.description || "A MCPB bundle",
    };
}
|
||||
/**
 * Default optional author contact info (email/URL) derived from package.json.
 */
export function getDefaultAuthorInfo(packageData) {
    const authorEmail = getDefaultAuthorEmail(packageData);
    const authorUrl = getDefaultAuthorUrl(packageData);
    return { authorEmail, authorUrl };
}
|
||||
/**
 * Default server section used in non-interactive mode.
 * Non-interactive init always assumes a Node.js server.
 */
export function getDefaultServerConfig(packageData) {
    const serverType = "node";
    const entryPoint = getDefaultEntryPoint(serverType, packageData);
    return {
        serverType,
        entryPoint,
        mcp_config: createMcpConfig(serverType, entryPoint),
    };
}
|
||||
/**
 * Default optional manifest fields used in non-interactive mode.
 * Keywords start empty and no repository is assumed.
 */
export function getDefaultOptionalFields(packageData) {
    const license = packageData.license || "MIT";
    return { keywords: "", license, repository: undefined };
}
|
||||
/**
 * Create the `mcp_config` stanza for a given server type.
 *
 * `${__dirname}` is a literal placeholder expanded by the MCPB host at
 * runtime, not by this code — hence plain string concatenation here.
 *
 * @param {"node"|"python"|"binary"} serverType
 * @param {string} entryPoint - Path relative to the bundle root.
 * @returns {{command: string, args: string[], env: object}|undefined}
 *   `undefined` for unrecognized server types.
 */
export function createMcpConfig(serverType, entryPoint) {
    const bundledPath = "${__dirname}/" + entryPoint;
    if (serverType === "node") {
        return { command: "node", args: [bundledPath], env: {} };
    }
    if (serverType === "python") {
        return {
            command: "python",
            args: [bundledPath],
            env: {
                PYTHONPATH: "${__dirname}/server/lib",
            },
        };
    }
    if (serverType === "binary") {
        return { command: bundledPath, args: [], env: {} };
    }
    // Unknown server types intentionally yield undefined (as before).
}
|
||||
/**
 * Default server entry point for a given server type.
 * For Node servers, package.json "main" wins when present.
 *
 * @returns {string|undefined} undefined for unrecognized server types.
 */
export function getDefaultEntryPoint(serverType, packageData) {
    if (serverType === "node") {
        return packageData?.main || "server/index.js";
    }
    const fixedDefaults = {
        python: "server/main.py",
        binary: "server/my-server",
    };
    return fixedDefaults[serverType];
}
|
||||
/**
 * Interactively collect the required manifest fields, seeded from
 * package.json. Prompts in order: name, author name, display name,
 * version, description — the order itself is part of the CLI UX.
 *
 * @param {object} packageData - Parsed package.json data (may be empty).
 * @param {string} resolvedPath - Extension directory; its basename is the
 *   fallback extension name.
 * @returns {Promise<{name: string, authorName: string, displayName: string, version: string, description: string}>}
 */
export async function promptBasicInfo(packageData, resolvedPath) {
    const defaultName = packageData.name || basename(resolvedPath);
    const name = await input({
        message: "Extension name:",
        default: defaultName,
        validate: (value) => value.trim().length > 0 || "Name is required",
    });
    const authorName = await input({
        message: "Author name:",
        default: getDefaultAuthorName(packageData),
        validate: (value) => value.trim().length > 0 || "Author name is required",
    });
    // The display name defaults to the name the user just entered.
    const displayName = await input({
        message: "Display name (optional):",
        default: name,
    });
    const version = await input({
        message: "Version:",
        default: packageData.version || "1.0.0",
        validate: (value) => {
            if (!value.trim())
                return "Version is required";
            // Requires at least MAJOR.MINOR.PATCH; a suffix (e.g. "-beta") may follow.
            if (!/^\d+\.\d+\.\d+/.test(value)) {
                return "Version must follow semantic versioning (e.g., 1.0.0)";
            }
            return true;
        },
    });
    const description = await input({
        message: "Description:",
        default: packageData.description || "",
        validate: (value) => value.trim().length > 0 || "Description is required",
    });
    return { name, authorName, displayName, version, description };
}
|
||||
/**
 * Ask for the optional author contact fields, defaulting from package.json.
 *
 * @param {object} packageData - Parsed package.json data.
 * @returns {Promise<{authorEmail: string, authorUrl: string}>}
 */
export async function promptAuthorInfo(packageData) {
    const ask = (message, fallback) => input({ message, default: fallback });
    const authorEmail = await ask("Author email (optional):", getDefaultAuthorEmail(packageData));
    const authorUrl = await ask("Author URL (optional):", getDefaultAuthorUrl(packageData));
    return { authorEmail, authorUrl };
}
|
||||
/**
 * Interactively choose the server type and entry point, then derive the
 * matching mcp_config stanza.
 *
 * @param {object} packageData - Parsed package.json, used for entry-point defaults.
 * @returns {Promise<{serverType: string, entryPoint: string, mcp_config: object}>}
 */
export async function promptServerConfig(packageData) {
    const serverType = (await select({
        message: "Server type:",
        choices: [
            { name: "Node.js", value: "node" },
            { name: "Python", value: "python" },
            { name: "Binary", value: "binary" },
        ],
        default: "node",
    }));
    const entryPoint = await input({
        message: "Entry point:",
        default: getDefaultEntryPoint(serverType, packageData),
    });
    // snake_case kept deliberately: the key is emitted verbatim into manifest.json.
    const mcp_config = createMcpConfig(serverType, entryPoint);
    return { serverType, entryPoint, mcp_config };
}
|
||||
/**
 * Interactively collect the list of tools the server advertises.
 * Loops until the user declines "Add another tool?". The generated-tools
 * question is only asked when the user opted into advertising tools at all.
 *
 * @returns {Promise<{tools: Array<{name: string, description?: string}>, toolsGenerated: boolean}>}
 */
export async function promptTools() {
    const addTools = await confirm({
        message: "Does your MCP Server provide tools you want to advertise (optional)?",
        default: true,
    });
    const tools = [];
    let toolsGenerated = false;
    if (addTools) {
        let addMore = true;
        while (addMore) {
            const toolName = await input({
                message: "Tool name:",
                validate: (value) => value.trim().length > 0 || "Tool name is required",
            });
            const toolDescription = await input({
                message: "Tool description (optional):",
            });
            // Only include the description key when the user actually typed one.
            tools.push({
                name: toolName,
                ...(toolDescription ? { description: toolDescription } : {}),
            });
            addMore = await confirm({
                message: "Add another tool?",
                default: false,
            });
        }
        // Ask about generated tools
        toolsGenerated = await confirm({
            message: "Does your server generate additional tools at runtime?",
            default: false,
        });
    }
    return { tools, toolsGenerated };
}
|
||||
/**
 * Interactively collect the list of prompts the server advertises.
 * Each prompt may carry named arguments (unique within the prompt) and a
 * required text template; `${arguments.name}` placeholders in the template
 * are resolved by the MCPB host, not here.
 *
 * @returns {Promise<{prompts: Array<{name: string, description?: string, arguments?: string[], text: string}>, promptsGenerated: boolean}>}
 */
export async function promptPrompts() {
    const addPrompts = await confirm({
        message: "Does your MCP Server provide prompts you want to advertise (optional)?",
        default: false,
    });
    const prompts = [];
    let promptsGenerated = false;
    if (addPrompts) {
        let addMore = true;
        while (addMore) {
            const promptName = await input({
                message: "Prompt name:",
                validate: (value) => value.trim().length > 0 || "Prompt name is required",
            });
            const promptDescription = await input({
                message: "Prompt description (optional):",
            });
            // Ask about arguments
            const hasArguments = await confirm({
                message: "Does this prompt have arguments?",
                default: false,
            });
            const argumentNames = [];
            if (hasArguments) {
                let addMoreArgs = true;
                while (addMoreArgs) {
                    const argName = await input({
                        message: "Argument name:",
                        // The validator closes over argumentNames to enforce uniqueness
                        // against the names collected so far.
                        validate: (value) => {
                            if (!value.trim())
                                return "Argument name is required";
                            if (argumentNames.includes(value)) {
                                return "Argument names must be unique";
                            }
                            return true;
                        },
                    });
                    argumentNames.push(argName);
                    addMoreArgs = await confirm({
                        message: "Add another argument?",
                        default: false,
                    });
                }
            }
            // Prompt for the text template
            const promptText = await input({
                message: hasArguments
                    ? `Prompt text (use \${arguments.name} for arguments: ${argumentNames.join(", ")}):`
                    : "Prompt text:",
                validate: (value) => value.trim().length > 0 || "Prompt text is required",
            });
            // Optional keys (description, arguments) are omitted entirely when empty.
            prompts.push({
                name: promptName,
                ...(promptDescription ? { description: promptDescription } : {}),
                ...(argumentNames.length > 0 ? { arguments: argumentNames } : {}),
                text: promptText,
            });
            addMore = await confirm({
                message: "Add another prompt?",
                default: false,
            });
        }
        // Ask about generated prompts
        promptsGenerated = await confirm({
            message: "Does your server generate additional prompts at runtime?",
            default: false,
        });
    }
    return { prompts, promptsGenerated };
}
|
||||
/**
 * Interactively collect the optional manifest fields: keywords (raw
 * comma-separated string, split later by buildManifest), license, and an
 * optional git repository entry.
 *
 * @param {object} packageData - Parsed package.json, used for defaults.
 * @returns {Promise<{keywords: string, license: string, repository: {type: string, url: string}|undefined}>}
 */
export async function promptOptionalFields(packageData) {
    const keywords = await input({
        message: "Keywords (comma-separated, optional):",
        default: "",
    });
    const license = await input({
        message: "License:",
        default: packageData.license || "MIT",
    });
    const addRepository = await confirm({
        message: "Add repository information?",
        // Default to yes when package.json already declares a repository.
        default: !!packageData.repository,
    });
    let repository;
    if (addRepository) {
        const repoUrl = await input({
            message: "Repository URL:",
            default: getDefaultRepositoryUrl(packageData),
        });
        // An empty answer leaves repository undefined (omitted from the manifest).
        if (repoUrl) {
            repository = {
                type: "git",
                url: repoUrl,
            };
        }
    }
    return { keywords, license, repository };
}
|
||||
/**
 * Optionally collect a detailed long description, seeded with the short one.
 *
 * @param {string} description - Short description used as the default text.
 * @returns {Promise<string|undefined>} The long description, or undefined
 *   when the user declines.
 */
export async function promptLongDescription(description) {
    const wantsLongDescription = await confirm({
        message: "Add a detailed long description?",
        default: false,
    });
    if (!wantsLongDescription) {
        return undefined;
    }
    return await input({
        message: "Long description (supports basic markdown):",
        default: description,
    });
}
|
||||
/**
 * Interactively collect the optional homepage/documentation/support URLs.
 * Each answer may be left blank; non-blank answers must parse with `new URL`.
 *
 * @returns {Promise<{homepage: string, documentation: string, support: string}>}
 */
export async function promptUrls() {
    const homepage = await input({
        message: "Homepage URL (optional):",
        validate: (value) => {
            // Blank is allowed — the field is optional.
            if (!value.trim())
                return true;
            try {
                new URL(value);
                return true;
            }
            catch {
                return "Must be a valid URL (e.g., https://example.com)";
            }
        },
    });
    const documentation = await input({
        message: "Documentation URL (optional):",
        validate: (value) => {
            if (!value.trim())
                return true;
            try {
                new URL(value);
                return true;
            }
            catch {
                return "Must be a valid URL";
            }
        },
    });
    const support = await input({
        message: "Support URL (optional):",
        validate: (value) => {
            if (!value.trim())
                return true;
            try {
                new URL(value);
                return true;
            }
            catch {
                return "Must be a valid URL";
            }
        },
    });
    return { homepage, documentation, support };
}
|
||||
/**
 * Interactively collect the optional icon path and screenshot paths.
 * Paths are relative to the manifest; '..' segments are rejected to keep
 * asset references inside the bundle.
 *
 * @returns {Promise<{icon: string, screenshots: string[]}>}
 */
export async function promptVisualAssets() {
    const icon = await input({
        message: "Icon file path (optional, relative to manifest):",
        validate: (value) => {
            // Blank is allowed — the icon is optional.
            if (!value.trim())
                return true;
            if (value.includes(".."))
                return "Relative paths cannot include '..'";
            return true;
        },
    });
    const addScreenshots = await confirm({
        message: "Add screenshots?",
        default: false,
    });
    const screenshots = [];
    if (addScreenshots) {
        let addMore = true;
        while (addMore) {
            const screenshot = await input({
                message: "Screenshot file path (relative to manifest):",
                validate: (value) => {
                    if (!value.trim())
                        return "Screenshot path is required";
                    if (value.includes(".."))
                        return "Relative paths cannot include '..'";
                    return true;
                },
            });
            screenshots.push(screenshot);
            addMore = await confirm({
                message: "Add another screenshot?",
                default: false,
            });
        }
    }
    return { icon, screenshots };
}
|
||||
/**
 * Interactively collect optional compatibility constraints: supported
 * platforms and (for node/python servers) a runtime version constraint.
 * Returns undefined when the user declines entirely; otherwise an object
 * that includes only the sections the user actually filled in (possibly
 * empty when everything was declined or left blank).
 *
 * @param {"node"|"python"|"binary"} serverType - Binary servers skip the
 *   runtime-constraint questions.
 * @returns {Promise<{platforms?: string[], runtimes?: object}|undefined>}
 */
export async function promptCompatibility(serverType) {
    const addCompatibility = await confirm({
        message: "Add compatibility constraints?",
        default: false,
    });
    if (!addCompatibility) {
        return undefined;
    }
    const addPlatforms = await confirm({
        message: "Specify supported platforms?",
        default: false,
    });
    let platforms;
    if (addPlatforms) {
        const selectedPlatforms = [];
        const supportsDarwin = await confirm({
            message: "Support macOS (darwin)?",
            default: true,
        });
        if (supportsDarwin)
            selectedPlatforms.push("darwin");
        const supportsWin32 = await confirm({
            message: "Support Windows (win32)?",
            default: true,
        });
        if (supportsWin32)
            selectedPlatforms.push("win32");
        const supportsLinux = await confirm({
            message: "Support Linux?",
            default: true,
        });
        if (supportsLinux)
            selectedPlatforms.push("linux");
        // Declining every platform leaves the key out rather than emitting [].
        platforms = selectedPlatforms.length > 0 ? selectedPlatforms : undefined;
    }
    let runtimes;
    if (serverType !== "binary") {
        const addRuntimes = await confirm({
            message: "Specify runtime version constraints?",
            default: false,
        });
        if (addRuntimes) {
            if (serverType === "python") {
                const pythonVersion = await input({
                    message: "Python version constraint (e.g., >=3.8,<4.0):",
                    validate: (value) => value.trim().length > 0 || "Python version constraint is required",
                });
                runtimes = { python: pythonVersion };
            }
            else if (serverType === "node") {
                const nodeVersion = await input({
                    message: "Node.js version constraint (e.g., >=16.0.0):",
                    validate: (value) => value.trim().length > 0 || "Node.js version constraint is required",
                });
                runtimes = { node: nodeVersion };
            }
        }
    }
    return {
        ...(platforms ? { platforms } : {}),
        ...(runtimes ? { runtimes } : {}),
    };
}
|
||||
/**
 * Interactively build the user_config section: a map of option-key to
 * option descriptor (type/title/description/required/sensitive, plus an
 * optional default and, for numbers, optional min/max bounds).
 * Returns an empty object when the user declines.
 *
 * @returns {Promise<Record<string, object>>}
 */
export async function promptUserConfig() {
    const addUserConfig = await confirm({
        message: "Add user-configurable options?",
        default: false,
    });
    if (!addUserConfig) {
        return {};
    }
    const userConfig = {};
    let addMore = true;
    while (addMore) {
        const optionKey = await input({
            message: "Configuration option key (unique identifier):",
            // Closes over userConfig to reject keys already collected this run.
            validate: (value) => {
                if (!value.trim())
                    return "Key is required";
                if (userConfig[value])
                    return "Key must be unique";
                return true;
            },
        });
        const optionType = (await select({
            message: "Option type:",
            choices: [
                { name: "String", value: "string" },
                { name: "Number", value: "number" },
                { name: "Boolean", value: "boolean" },
                { name: "Directory", value: "directory" },
                { name: "File", value: "file" },
            ],
        }));
        const optionTitle = await input({
            message: "Option title (human-readable name):",
            validate: (value) => value.trim().length > 0 || "Title is required",
        });
        const optionDescription = await input({
            message: "Option description:",
            validate: (value) => value.trim().length > 0 || "Description is required",
        });
        const optionRequired = await confirm({
            message: "Is this option required?",
            default: false,
        });
        const optionSensitive = await confirm({
            message: "Is this option sensitive (like a password)?",
            default: false,
        });
        // Build the option object
        const option = {
            type: optionType,
            title: optionTitle,
            description: optionDescription,
            required: optionRequired,
            sensitive: optionSensitive,
        };
        // Add default value if not required
        if (!optionRequired) {
            let defaultValue;
            if (optionType === "boolean") {
                defaultValue = await confirm({
                    message: "Default value:",
                    default: false,
                });
            }
            else if (optionType === "number") {
                const defaultStr = await input({
                    message: "Default value (number):",
                    validate: (value) => {
                        if (!value.trim())
                            return true;
                        return !isNaN(Number(value)) || "Must be a valid number";
                    },
                });
                // Blank answer means "no default" rather than NaN/0.
                defaultValue = defaultStr ? Number(defaultStr) : undefined;
            }
            else {
                // string/directory/file defaults are free-form text.
                defaultValue = await input({
                    message: "Default value (optional):",
                });
            }
            // Note: a boolean default of `false` is kept (it is neither
            // undefined nor ""); only blank/absent answers are dropped.
            if (defaultValue !== undefined && defaultValue !== "") {
                option.default = defaultValue;
            }
        }
        // Add constraints for number types
        if (optionType === "number") {
            const addConstraints = await confirm({
                message: "Add min/max constraints?",
                default: false,
            });
            if (addConstraints) {
                const min = await input({
                    message: "Minimum value (optional):",
                    validate: (value) => {
                        if (!value.trim())
                            return true;
                        return !isNaN(Number(value)) || "Must be a valid number";
                    },
                });
                const max = await input({
                    message: "Maximum value (optional):",
                    validate: (value) => {
                        if (!value.trim())
                            return true;
                        return !isNaN(Number(value)) || "Must be a valid number";
                    },
                });
                if (min)
                    option.min = Number(min);
                if (max)
                    option.max = Number(max);
            }
        }
        userConfig[optionKey] = option;
        addMore = await confirm({
            message: "Add another configuration option?",
            default: false,
        });
    }
    return userConfig;
}
|
||||
/**
 * Assemble the final manifest object from all collected sections.
 *
 * Pure function: no prompting or I/O. Optional keys are conditionally
 * spread so that absent/empty values are omitted entirely rather than
 * emitted as null/empty. The literal's key order is deliberate — it is the
 * order serialized into manifest.json by the caller.
 *
 * @param {object} basicInfo - {name, displayName, version, description, authorName}.
 * @param {string|undefined} longDescription
 * @param {object} authorInfo - {authorEmail, authorUrl}.
 * @param {object} urls - {homepage, documentation, support} (may be blank strings).
 * @param {object} visualAssets - {icon, screenshots}.
 * @param {object} serverConfig - {serverType, entryPoint, mcp_config}.
 * @param {Array} tools
 * @param {boolean} toolsGenerated
 * @param {Array} prompts
 * @param {boolean} promptsGenerated
 * @param {object|undefined} compatibility
 * @param {object} userConfig - Keyed option descriptors; empty object omits the section.
 * @param {object} optionalFields - {keywords, license, repository}; keywords is a raw
 *   comma-separated string that is split/trimmed here.
 * @returns {object} The manifest ready for JSON serialization.
 */
export function buildManifest(basicInfo, longDescription, authorInfo, urls, visualAssets, serverConfig, tools, toolsGenerated, prompts, promptsGenerated, compatibility, userConfig, optionalFields) {
    const { name, displayName, version, description, authorName } = basicInfo;
    const { authorEmail, authorUrl } = authorInfo;
    const { serverType, entryPoint, mcp_config } = serverConfig;
    const { keywords, license, repository } = optionalFields;
    return {
        manifest_version: CURRENT_MANIFEST_VERSION,
        name,
        // display_name is only written when it actually differs from name.
        ...(displayName && displayName !== name
            ? { display_name: displayName }
            : {}),
        version,
        description,
        ...(longDescription ? { long_description: longDescription } : {}),
        author: {
            name: authorName,
            ...(authorEmail ? { email: authorEmail } : {}),
            ...(authorUrl ? { url: authorUrl } : {}),
        },
        ...(urls.homepage ? { homepage: urls.homepage } : {}),
        ...(urls.documentation ? { documentation: urls.documentation } : {}),
        ...(urls.support ? { support: urls.support } : {}),
        ...(visualAssets.icon ? { icon: visualAssets.icon } : {}),
        ...(visualAssets.screenshots.length > 0
            ? { screenshots: visualAssets.screenshots }
            : {}),
        server: {
            type: serverType,
            entry_point: entryPoint,
            mcp_config,
        },
        ...(tools.length > 0 ? { tools } : {}),
        ...(toolsGenerated ? { tools_generated: true } : {}),
        ...(prompts.length > 0 ? { prompts } : {}),
        ...(promptsGenerated ? { prompts_generated: true } : {}),
        ...(compatibility ? { compatibility } : {}),
        ...(Object.keys(userConfig).length > 0 ? { user_config: userConfig } : {}),
        // Split the raw comma-separated keywords string, dropping blanks.
        ...(keywords
            ? {
                keywords: keywords
                    .split(",")
                    .map((k) => k.trim())
                    .filter((k) => k),
            }
            : {}),
        ...(license ? { license } : {}),
        ...(repository ? { repository } : {}),
    };
}
|
||||
/**
 * Print the numbered follow-up instructions shown after a manifest is created.
 */
export function printNextSteps() {
    const steps = [
        "Ensure all your production dependencies are in this directory",
        "Run 'mcpb pack' to create your .mcpb file",
    ];
    console.log("\nNext steps:");
    steps.forEach((step, index) => {
        console.log(`${index + 1}. ${step}`);
    });
}
|
||||
/**
 * Create a manifest.json in `targetPath`, either interactively or with
 * defaults derived from package.json.
 *
 * @param {string} [targetPath=process.cwd()] - Extension directory.
 * @param {boolean} [nonInteractive=false] - Skip all prompts and use defaults.
 * @returns {Promise<boolean>} true when the manifest was written; false when
 *   the user cancelled or an existing manifest blocked a non-interactive run.
 * @throws Re-throws unexpected errors; inquirer's Ctrl-C abort ("User force
 *   closed") is treated as a cancel, not an error.
 */
export async function initExtension(targetPath = process.cwd(), nonInteractive = false) {
    const resolvedPath = resolve(targetPath);
    const manifestPath = join(resolvedPath, "manifest.json");
    if (existsSync(manifestPath)) {
        // Never silently overwrite: non-interactive refuses, interactive asks.
        if (nonInteractive) {
            console.log("manifest.json already exists. Use --force to overwrite in non-interactive mode.");
            return false;
        }
        const overwrite = await confirm({
            message: "manifest.json already exists. Overwrite?",
            default: false,
        });
        if (!overwrite) {
            console.log("Cancelled");
            return false;
        }
    }
    if (!nonInteractive) {
        console.log("This utility will help you create a manifest.json file for your MCPB bundle.");
        console.log("Press ^C at any time to quit.\n");
    }
    else {
        console.log("Creating manifest.json with default values...");
    }
    try {
        const packageData = readPackageJson(resolvedPath);
        // Prompt for all information or use defaults
        const basicInfo = nonInteractive
            ? getDefaultBasicInfo(packageData, resolvedPath)
            : await promptBasicInfo(packageData, resolvedPath);
        const longDescription = nonInteractive
            ? undefined
            : await promptLongDescription(basicInfo.description);
        const authorInfo = nonInteractive
            ? getDefaultAuthorInfo(packageData)
            : await promptAuthorInfo(packageData);
        const urls = nonInteractive
            ? { homepage: "", documentation: "", support: "" }
            : await promptUrls();
        const visualAssets = nonInteractive
            ? { icon: "", screenshots: [] }
            : await promptVisualAssets();
        const serverConfig = nonInteractive
            ? getDefaultServerConfig(packageData)
            : await promptServerConfig(packageData);
        const toolsData = nonInteractive
            ? { tools: [], toolsGenerated: false }
            : await promptTools();
        const promptsData = nonInteractive
            ? { prompts: [], promptsGenerated: false }
            : await promptPrompts();
        const compatibility = nonInteractive
            ? undefined
            : await promptCompatibility(serverConfig.serverType);
        const userConfig = nonInteractive ? {} : await promptUserConfig();
        const optionalFields = nonInteractive
            ? getDefaultOptionalFields(packageData)
            : await promptOptionalFields(packageData);
        // Build manifest
        const manifest = buildManifest(basicInfo, longDescription, authorInfo, urls, visualAssets, serverConfig, toolsData.tools, toolsData.toolsGenerated, promptsData.prompts, promptsData.promptsGenerated, compatibility, userConfig, optionalFields);
        // Write manifest
        writeFileSync(manifestPath, JSON.stringify(manifest, null, 2) + "\n");
        console.log(`\nCreated manifest.json at ${manifestPath}`);
        printNextSteps();
        return true;
    }
    catch (error) {
        // Ctrl-C inside an inquirer prompt surfaces as this error message.
        if (error instanceof Error && error.message.includes("User force closed")) {
            console.log("\nCancelled");
            return false;
        }
        throw error;
    }
}
|
||||
200
extracted-source/node_modules/@anthropic-ai/mcpb/dist/cli/pack.js
generated
vendored
Normal file
200
extracted-source/node_modules/@anthropic-ai/mcpb/dist/cli/pack.js
generated
vendored
Normal file
@@ -0,0 +1,200 @@
|
||||
import { confirm } from "@inquirer/prompts";
|
||||
import { createHash } from "crypto";
|
||||
import { zipSync } from "fflate";
|
||||
import { existsSync, mkdirSync, readFileSync, statSync, writeFileSync, } from "fs";
|
||||
import { basename, join, relative, resolve, sep } from "path";
|
||||
import { getAllFilesWithCount, readMcpbIgnorePatterns } from "../node/files.js";
|
||||
import { validateManifest } from "../node/validate.js";
|
||||
import { CURRENT_MANIFEST_VERSION, McpbManifestSchema } from "../schemas.js";
|
||||
import { getLogger } from "../shared/log.js";
|
||||
import { initExtension } from "./init.js";
|
||||
/**
 * Format a byte count as a short human-readable size (B, kB, MB),
 * using binary multiples (1 kB = 1024 B here) with one decimal place.
 */
function formatFileSize(bytes) {
    const KB = 1024;
    const MB = KB * 1024;
    if (bytes < KB) {
        return `${bytes}B`;
    }
    return bytes < MB
        ? `${(bytes / KB).toFixed(1)}kB`
        : `${(bytes / MB).toFixed(1)}MB`;
}
|
||||
/**
 * Sanitize an extension name for use in the output archive filename.
 *
 * Lowercases, converts whitespace to hyphens, keeps only [a-z0-9-_.],
 * collapses hyphen runs, caps the length at 100, and strips leading/trailing
 * hyphens. The length cap is applied BEFORE the final trim so that a
 * truncated name can never end in a stray hyphen (the original applied the
 * cap last, which could reintroduce one).
 *
 * @param {string} name - Raw manifest name.
 * @returns {string} Filesystem-safe name (possibly empty).
 */
function sanitizeNameForFilename(name) {
    return name
        .toLowerCase()
        .replace(/\s+/g, "-") // Replace spaces with hyphens
        .replace(/[^a-z0-9-_.]/g, "") // Keep only alphanumeric, hyphens, underscores, and dots
        .replace(/-+/g, "-") // Replace multiple hyphens with single hyphen
        .substring(0, 100) // Limit length to 100 characters
        .replace(/^-+|-+$/g, ""); // Remove leading/trailing hyphens (after the cap)
}
|
||||
/**
 * Pack an extension directory into a .mcpb archive (a zip).
 *
 * Pipeline: validate directory → ensure/offer manifest creation → validate
 * and parse manifest → check manifest version → collect files (honoring
 * .mcpbignore) → print an npm-pack-style contents listing → zip with Unix
 * permissions preserved → write the archive and print a summary.
 *
 * @param {object} opts
 * @param {string} opts.extensionPath - Directory to pack.
 * @param {string} [opts.outputPath] - Target .mcpb path; defaults to
 *   `<dirname>.mcpb` in the current working directory.
 * @param {boolean} [opts.silent] - Suppress informational logging.
 * @returns {Promise<boolean>} true on success, false on any handled failure.
 */
export async function packExtension({ extensionPath, outputPath, silent, }) {
    const resolvedPath = resolve(extensionPath);
    const logger = getLogger({ silent });
    // Check if directory exists
    if (!existsSync(resolvedPath) || !statSync(resolvedPath).isDirectory()) {
        logger.error(`ERROR: Directory not found: ${extensionPath}`);
        return false;
    }
    // Check if manifest exists
    const manifestPath = join(resolvedPath, "manifest.json");
    if (!existsSync(manifestPath)) {
        logger.log(`No manifest.json found in ${extensionPath}`);
        // NOTE(review): this confirm prompt runs even when `silent` is set —
        // confirm whether pack is ever invoked in a fully non-interactive context.
        const shouldInit = await confirm({
            message: "Would you like to create a manifest.json file?",
            default: true,
        });
        if (shouldInit) {
            const success = await initExtension(extensionPath);
            if (!success) {
                logger.error("ERROR: Failed to create manifest");
                return false;
            }
        }
        else {
            logger.error("ERROR: Cannot pack extension without manifest.json");
            return false;
        }
    }
    // Validate manifest first
    logger.log("Validating manifest...");
    if (!validateManifest(manifestPath)) {
        logger.error("ERROR: Cannot pack extension with invalid manifest");
        return false;
    }
    // Read and parse manifest
    let manifest;
    try {
        const manifestContent = readFileSync(manifestPath, "utf-8");
        const manifestData = JSON.parse(manifestContent);
        manifest = McpbManifestSchema.parse(manifestData);
    }
    catch (error) {
        logger.error("ERROR: Failed to parse manifest.json");
        if (error instanceof Error) {
            logger.error(` ${error.message}`);
        }
        return false;
    }
    // dxt_version is the legacy field name for manifest_version.
    const manifestVersion = manifest.manifest_version || manifest.dxt_version;
    if (manifestVersion !== CURRENT_MANIFEST_VERSION) {
        logger.error(`ERROR: Manifest version mismatch. Expected "${CURRENT_MANIFEST_VERSION}", found "${manifestVersion}"`);
        logger.error(` Please update the manifest_version in your manifest.json to "${CURRENT_MANIFEST_VERSION}"`);
        return false;
    }
    // Determine output path
    const extensionName = basename(resolvedPath);
    const finalOutputPath = outputPath
        ? resolve(outputPath)
        : resolve(`${extensionName}.mcpb`);
    // Ensure output directory exists
    const outputDir = join(finalOutputPath, "..");
    mkdirSync(outputDir, { recursive: true });
    try {
        // Read .mcpbignore patterns if present
        const mcpbIgnorePatterns = readMcpbIgnorePatterns(resolvedPath);
        // Get all files in the extension directory
        const { files, ignoredCount } = getAllFilesWithCount(resolvedPath, resolvedPath, {}, mcpbIgnorePatterns);
        // Print package header
        logger.log(`\n📦 ${manifest.name}@${manifest.version}`);
        // Print file list
        logger.log("Archive Contents");
        const fileEntries = Object.entries(files);
        let totalUnpackedSize = 0;
        // Sort files for consistent output
        fileEntries.sort(([a], [b]) => a.localeCompare(b));
        // Group files by directory for deep nesting
        const directoryGroups = new Map();
        const shallowFiles = [];
        for (const [filePath, fileData] of fileEntries) {
            const relPath = relative(resolvedPath, filePath);
            const content = fileData.data;
            // Size in bytes: string contents are measured as UTF-8.
            const size = typeof content === "string"
                ? Buffer.byteLength(content, "utf8")
                : content.length;
            totalUnpackedSize += size;
            // Check if file is deeply nested (3+ levels)
            const parts = relPath.split(sep);
            if (parts.length > 3) {
                // Group by the first 3 directory levels
                const groupKey = parts.slice(0, 3).join("/");
                if (!directoryGroups.has(groupKey)) {
                    directoryGroups.set(groupKey, { files: [], totalSize: 0 });
                }
                const group = directoryGroups.get(groupKey);
                group.files.push(relPath);
                group.totalSize += size;
            }
            else {
                shallowFiles.push({ path: relPath, size });
            }
        }
        // Print shallow files first
        for (const { path, size } of shallowFiles) {
            logger.log(`${formatFileSize(size).padStart(8)} ${path}`);
        }
        // Print grouped directories
        for (const [dir, { files, totalSize }] of directoryGroups) {
            if (files.length === 1) {
                // If only one file in the group, print it normally
                const filePath = files[0];
                const fileSize = totalSize;
                logger.log(`${formatFileSize(fileSize).padStart(8)} ${filePath}`);
            }
            else {
                // Print directory summary
                logger.log(`${formatFileSize(totalSize).padStart(8)} ${dir}/ [and ${files.length} more files]`);
            }
        }
        // Create zip with preserved file permissions
        const zipFiles = {};
        const isUnix = process.platform !== "win32";
        for (const [filePath, fileData] of Object.entries(files)) {
            if (isUnix) {
                // Set external file attributes to preserve Unix permissions
                // The mode needs to be shifted to the upper 16 bits for ZIP format
                zipFiles[filePath] = [
                    fileData.data,
                    { os: 3, attrs: (fileData.mode & 0o777) << 16 },
                ];
            }
            else {
                // On Windows, use default ZIP attributes (no Unix permissions)
                zipFiles[filePath] = fileData.data;
            }
        }
        const zipData = zipSync(zipFiles, {
            level: 9, // Maximum compression
            mtime: new Date(),
        });
        // Write zip file
        writeFileSync(finalOutputPath, zipData);
        // Calculate SHA sum
        const shasum = createHash("sha1").update(zipData).digest("hex");
        // Print archive details
        const sanitizedName = sanitizeNameForFilename(manifest.name);
        const archiveName = `${sanitizedName}-${manifest.version}.mcpb`;
        logger.log("\nArchive Details");
        logger.log(`name: ${manifest.name}`);
        logger.log(`version: ${manifest.version}`);
        logger.log(`filename: ${archiveName}`);
        logger.log(`package size: ${formatFileSize(zipData.length)}`);
        logger.log(`unpacked size: ${formatFileSize(totalUnpackedSize)}`);
        logger.log(`shasum: ${shasum}`);
        logger.log(`total files: ${fileEntries.length}`);
        logger.log(`ignored (.mcpbignore) files: ${ignoredCount}`);
        logger.log(`\nOutput: ${finalOutputPath}`);
        return true;
    }
    catch (error) {
        if (error instanceof Error) {
            logger.error(`ERROR: Archive error: ${error.message}`);
        }
        else {
            logger.error("ERROR: Unknown archive error occurred");
        }
        return false;
    }
}
|
||||
101
extracted-source/node_modules/@anthropic-ai/mcpb/dist/cli/unpack.js
generated
vendored
Normal file
101
extracted-source/node_modules/@anthropic-ai/mcpb/dist/cli/unpack.js
generated
vendored
Normal file
@@ -0,0 +1,101 @@
|
||||
import { unzipSync } from "fflate";
|
||||
import { chmodSync, existsSync, mkdirSync, readFileSync, writeFileSync, } from "fs";
|
||||
import { join, resolve, sep } from "path";
|
||||
import { extractSignatureBlock } from "../node/sign.js";
|
||||
import { getLogger } from "../shared/log.js";
|
||||
/**
 * Unpack a .mcpb archive (a ZIP with an optional trailing signature block)
 * into a directory.
 *
 * @param {object} opts
 * @param {string} opts.mcpbPath  - Path to the .mcpb file to unpack.
 * @param {string} [opts.outputDir] - Destination directory; defaults to process.cwd().
 * @param {boolean} [opts.silent] - Suppress log output.
 * @returns {Promise<boolean>} true on success, false on any error (logged, not thrown).
 */
export async function unpackExtension({ mcpbPath, outputDir, silent, }) {
    const logger = getLogger({ silent });
    const resolvedMcpbPath = resolve(mcpbPath);
    if (!existsSync(resolvedMcpbPath)) {
        logger.error(`ERROR: MCPB file not found: ${mcpbPath}`);
        return false;
    }
    const finalOutputDir = outputDir ? resolve(outputDir) : process.cwd();
    if (!existsSync(finalOutputDir)) {
        mkdirSync(finalOutputDir, { recursive: true });
    }
    try {
        const fileContent = readFileSync(resolvedMcpbPath);
        // Strip the appended MCPB signature block (if present) so we work
        // with the plain ZIP payload.
        const { originalContent } = extractSignatureBlock(fileContent);
        // Parse file attributes from ZIP central directory.
        // fflate's unzipSync does not expose per-entry external attributes,
        // so Unix permission bits are recovered by walking the central
        // directory manually.
        const fileAttributes = new Map();
        const isUnix = process.platform !== "win32";
        if (isUnix) {
            const zipBuffer = originalContent;
            // Find the End Of Central Directory record by scanning backwards
            // for its signature (0x06054b50). The EOCD is at least 22 bytes,
            // hence the starting offset.
            let eocdOffset = -1;
            for (let i = zipBuffer.length - 22; i >= 0; i--) {
                if (zipBuffer.readUInt32LE(i) === 0x06054b50) {
                    eocdOffset = i;
                    break;
                }
            }
            if (eocdOffset !== -1) {
                // EOCD layout: offset 8 = entry count, offset 16 = central dir offset.
                const centralDirOffset = zipBuffer.readUInt32LE(eocdOffset + 16);
                const centralDirEntries = zipBuffer.readUInt16LE(eocdOffset + 8);
                let offset = centralDirOffset;
                for (let i = 0; i < centralDirEntries; i++) {
                    // Each central directory file header starts with 0x02014b50.
                    if (zipBuffer.readUInt32LE(offset) === 0x02014b50) {
                        const externalAttrs = zipBuffer.readUInt32LE(offset + 38);
                        const filenameLength = zipBuffer.readUInt16LE(offset + 28);
                        const filename = zipBuffer.toString("utf8", offset + 46, offset + 46 + filenameLength);
                        // Extract Unix permissions from external attributes (upper 16 bits).
                        const mode = (externalAttrs >> 16) & 0o777;
                        if (mode > 0) {
                            fileAttributes.set(filename, mode);
                        }
                        // Advance past the fixed 46-byte header plus the three
                        // variable-length trailing fields.
                        const extraFieldLength = zipBuffer.readUInt16LE(offset + 30);
                        const commentLength = zipBuffer.readUInt16LE(offset + 32);
                        offset += 46 + filenameLength + extraFieldLength + commentLength;
                    }
                    else {
                        // Malformed/unexpected record: stop parsing attributes
                        // rather than misreading the rest of the directory.
                        break;
                    }
                }
            }
        }
        const decompressed = unzipSync(originalContent);
        for (const relativePath in decompressed) {
            if (Object.prototype.hasOwnProperty.call(decompressed, relativePath)) {
                const data = decompressed[relativePath];
                const fullPath = join(finalOutputDir, relativePath);
                // Prevent zip slip attacks by validating the resolved path
                // stays inside the output directory.
                const normalizedPath = resolve(fullPath);
                const normalizedOutputDir = resolve(finalOutputDir);
                if (!normalizedPath.startsWith(normalizedOutputDir + sep) &&
                    normalizedPath !== normalizedOutputDir) {
                    throw new Error(`Path traversal attempt detected: ${relativePath}`);
                }
                // Ensure the parent directory exists before writing the entry.
                const dir = join(fullPath, "..");
                if (!existsSync(dir)) {
                    mkdirSync(dir, { recursive: true });
                }
                writeFileSync(fullPath, data);
                // Restore Unix file permissions if they were recorded in the
                // central directory.
                if (isUnix && fileAttributes.has(relativePath)) {
                    try {
                        const mode = fileAttributes.get(relativePath);
                        if (mode !== undefined) {
                            chmodSync(fullPath, mode);
                        }
                    }
                    catch (error) {
                        // Silently ignore permission errors — restoring mode
                        // bits is best-effort.
                    }
                }
            }
        }
        logger.log(`Extension unpacked successfully to ${finalOutputDir}`);
        return true;
    }
    catch (error) {
        if (error instanceof Error) {
            logger.error(`ERROR: Failed to unpack extension: ${error.message}`);
        }
        else {
            logger.error("ERROR: An unknown error occurred during unpacking.");
        }
        return false;
    }
}
|
||||
10
extracted-source/node_modules/@anthropic-ai/mcpb/dist/index.js
generated
vendored
Normal file
10
extracted-source/node_modules/@anthropic-ai/mcpb/dist/index.js
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
// Default export includes everything (backward compatibility)
|
||||
export * from "./cli/init.js";
|
||||
export * from "./cli/pack.js";
|
||||
export * from "./cli/unpack.js";
|
||||
export * from "./node/files.js";
|
||||
export * from "./node/sign.js";
|
||||
export * from "./node/validate.js";
|
||||
export * from "./schemas.js";
|
||||
export * from "./shared/config.js";
|
||||
export * from "./types.js";
|
||||
115
extracted-source/node_modules/@anthropic-ai/mcpb/dist/node/files.js
generated
vendored
Normal file
115
extracted-source/node_modules/@anthropic-ai/mcpb/dist/node/files.js
generated
vendored
Normal file
@@ -0,0 +1,115 @@
|
||||
import { existsSync, readdirSync, readFileSync, statSync } from "fs";
|
||||
import ignore from "ignore";
|
||||
import { join, relative, sep } from "path";
|
||||
// Files/patterns to exclude from the package
|
||||
// Files/patterns to exclude from the package. These are gitignore-style
// patterns fed to the `ignore` matcher alongside any user-supplied
// .mcpbignore patterns.
export const EXCLUDE_PATTERNS = [
    // OS metadata
    ".DS_Store",
    "Thumbs.db",
    // VCS files
    ".gitignore",
    ".git",
    // The ignore file itself is never shipped
    ".mcpbignore",
    // Logs and environment files (".env*" also covers .env, .env.production, …)
    "*.log",
    ".env*",
    // Package-manager state and tool configuration
    ".npm",
    ".npmrc",
    ".yarnrc",
    ".yarn",
    ".eslintrc",
    ".editorconfig",
    ".prettierrc",
    ".prettierignore",
    ".eslintignore",
    ".nycrc",
    ".babelrc",
    ".pnp.*",
    "node_modules/.cache",
    "node_modules/.bin",
    // Build artifacts and source maps
    "*.map",
    ".env.local",
    ".env.*.local",
    "npm-debug.log*",
    "yarn-debug.log*",
    "yarn-error.log*",
    // Lockfiles are not needed inside a packed extension
    "package-lock.json",
    "yarn.lock",
    // Never nest previously-built bundles
    "*.mcpb",
    // TypeScript build outputs/config
    "*.d.ts",
    "*.tsbuildinfo",
    "tsconfig.json",
];
|
||||
/**
 * Read and parse the `.mcpbignore` file in `baseDir`.
 *
 * Blank lines and `#` comment lines are dropped; remaining lines are
 * trimmed and returned as gitignore-style patterns.
 *
 * @param {string} baseDir - Directory expected to contain `.mcpbignore`.
 * @returns {string[]} Patterns, or an empty array if the file is missing
 *   or unreadable (a warning is printed in the unreadable case).
 */
export function readMcpbIgnorePatterns(baseDir) {
    const ignoreFilePath = join(baseDir, ".mcpbignore");
    if (!existsSync(ignoreFilePath)) {
        return [];
    }
    try {
        const rawLines = readFileSync(ignoreFilePath, "utf-8").split(/\r?\n/);
        const patterns = [];
        for (const rawLine of rawLines) {
            const trimmed = rawLine.trim();
            // Skip blanks and comments.
            if (trimmed.length === 0 || trimmed.startsWith("#")) {
                continue;
            }
            patterns.push(trimmed);
        }
        return patterns;
    }
    catch (error) {
        console.warn(`Warning: Could not read .mcpbignore file: ${error instanceof Error ? error.message : "Unknown error"}`);
        return [];
    }
}
|
||||
// Build an `ignore` matcher combining the built-in EXCLUDE_PATTERNS with
// any caller-supplied patterns (e.g. from .mcpbignore).
function buildIgnoreChecker(additionalPatterns) {
    const checker = ignore();
    checker.add(EXCLUDE_PATTERNS);
    checker.add(additionalPatterns);
    return checker;
}
|
||||
/**
 * Used for testing, calls the same methods as the other ignore checks.
 *
 * @param {string} filePath - Path relative to the package root.
 * @param {string[]} [additionalPatterns] - Extra gitignore-style patterns.
 * @returns {boolean} true if the path would be excluded from the package.
 */
export function shouldExclude(filePath, additionalPatterns = []) {
    const checker = buildIgnoreChecker(additionalPatterns);
    return checker.ignores(filePath);
}
|
||||
/**
 * Recursively collect all non-ignored files under `dirPath`.
 *
 * @param {string} dirPath - Directory to walk.
 * @param {string} [baseDir] - Root used to compute zip-relative paths.
 * @param {Object<string, Buffer>} [fileList] - Accumulator; mutated and returned.
 * @param {string[]} [additionalPatterns] - Extra ignore patterns.
 * @param {object} [ignoreChecker] - Internal: prebuilt ignore matcher so the
 *   pattern set is compiled once per top-level call instead of once per
 *   directory (the original rebuilt it on every recursive descent).
 * @returns {Object<string, Buffer>} Map of forward-slash zip paths to file contents.
 */
export function getAllFiles(dirPath, baseDir = dirPath, fileList = {}, additionalPatterns = [], ignoreChecker = buildIgnoreChecker(additionalPatterns)) {
    const entries = readdirSync(dirPath);
    for (const entry of entries) {
        const filePath = join(dirPath, entry);
        const relativePath = relative(baseDir, filePath);
        if (ignoreChecker.ignores(relativePath)) {
            continue;
        }
        const stat = statSync(filePath);
        if (stat.isDirectory()) {
            // Pass the checker down so it is not rebuilt per directory.
            getAllFiles(filePath, baseDir, fileList, additionalPatterns, ignoreChecker);
        }
        else {
            // Use forward slashes in zip file paths.
            const zipPath = relativePath.split(sep).join("/");
            fileList[zipPath] = readFileSync(filePath);
        }
    }
    return fileList;
}
|
||||
/**
 * Recursively collect all non-ignored files under `dirPath`, preserving each
 * file's mode bits and counting how many entries were ignored.
 *
 * @param {string} dirPath - Directory to walk.
 * @param {string} [baseDir] - Root used to compute zip-relative paths.
 * @param {Object<string, {data: Buffer, mode: number}>} [fileList] - Accumulator; mutated and returned.
 * @param {string[]} [additionalPatterns] - Extra ignore patterns.
 * @param {number} [ignoredCount] - Running count of ignored entries.
 * @param {object} [ignoreChecker] - Internal: prebuilt ignore matcher so the
 *   pattern set is compiled once per top-level call instead of once per
 *   directory (the original rebuilt it on every recursive descent).
 * @returns {{files: Object<string, {data: Buffer, mode: number}>, ignoredCount: number}}
 */
export function getAllFilesWithCount(dirPath, baseDir = dirPath, fileList = {}, additionalPatterns = [], ignoredCount = 0, ignoreChecker = buildIgnoreChecker(additionalPatterns)) {
    const entries = readdirSync(dirPath);
    for (const entry of entries) {
        const filePath = join(dirPath, entry);
        const relativePath = relative(baseDir, filePath);
        if (ignoreChecker.ignores(relativePath)) {
            ignoredCount++;
            continue;
        }
        const stat = statSync(filePath);
        if (stat.isDirectory()) {
            // Recurse with the shared checker and fold the subtree's ignore count.
            const result = getAllFilesWithCount(filePath, baseDir, fileList, additionalPatterns, ignoredCount, ignoreChecker);
            ignoredCount = result.ignoredCount;
        }
        else {
            // Use forward slashes in zip file paths.
            const zipPath = relativePath.split(sep).join("/");
            fileList[zipPath] = {
                data: readFileSync(filePath),
                mode: stat.mode,
            };
        }
    }
    return { files: fileList, ignoredCount };
}
|
||||
333
extracted-source/node_modules/@anthropic-ai/mcpb/dist/node/sign.js
generated
vendored
Normal file
333
extracted-source/node_modules/@anthropic-ai/mcpb/dist/node/sign.js
generated
vendored
Normal file
@@ -0,0 +1,333 @@
|
||||
import { execFile } from "child_process";
|
||||
import { readFileSync, writeFileSync } from "fs";
|
||||
import { mkdtemp, rm, writeFile } from "fs/promises";
|
||||
import forge from "node-forge";
|
||||
import { tmpdir } from "os";
|
||||
import { join } from "path";
|
||||
import { promisify } from "util";
|
||||
// Signature block markers
|
||||
const SIGNATURE_HEADER = "MCPB_SIG_V1";
|
||||
const SIGNATURE_FOOTER = "MCPB_SIG_END";
|
||||
const execFileAsync = promisify(execFile);
|
||||
/**
 * Signs a MCPB file with the given certificate and private key using PKCS#7.
 *
 * The PKCS#7 signature is created detached (the ZIP payload is not embedded
 * in it) and appended to the file in an MCPB signature block; the file at
 * `mcpbPath` is rewritten in place.
 *
 * @param mcpbPath Path to the MCPB file to sign
 * @param certPath Path to the certificate file (PEM format)
 * @param keyPath Path to the private key file (PEM format)
 * @param intermediates Optional array of intermediate certificate paths
 */
export function signMcpbFile(mcpbPath, certPath, keyPath, intermediates) {
    // Read the original MCPB file
    const mcpbContent = readFileSync(mcpbPath);
    // Read certificate and key
    const certificatePem = readFileSync(certPath, "utf-8");
    const privateKeyPem = readFileSync(keyPath, "utf-8");
    // Read intermediate certificates if provided
    const intermediatePems = intermediates?.map((path) => readFileSync(path, "utf-8"));
    // Create PKCS#7 signed data over the current file content
    const p7 = forge.pkcs7.createSignedData();
    p7.content = forge.util.createBuffer(mcpbContent);
    // Parse and add certificates
    const signingCert = forge.pki.certificateFromPem(certificatePem);
    const privateKey = forge.pki.privateKeyFromPem(privateKeyPem);
    p7.addCertificate(signingCert);
    // Add intermediate certificates so verifiers can build the chain
    if (intermediatePems) {
        for (const pem of intermediatePems) {
            p7.addCertificate(forge.pki.certificateFromPem(pem));
        }
    }
    // Add signer with SHA-256 and the standard authenticated attributes
    p7.addSigner({
        key: privateKey,
        certificate: signingCert,
        digestAlgorithm: forge.pki.oids.sha256,
        authenticatedAttributes: [
            {
                type: forge.pki.oids.contentType,
                value: forge.pki.oids.data,
            },
            {
                type: forge.pki.oids.messageDigest,
                // Value will be auto-populated
            },
            {
                type: forge.pki.oids.signingTime,
                // Value will be auto-populated with current time
            },
        ],
    });
    // Sign with detached signature (content is not embedded in the PKCS#7)
    p7.sign({ detached: true });
    // Convert to DER format; forge returns a binary string, hence "binary"
    const asn1 = forge.asn1.toDer(p7.toAsn1());
    const pkcs7Signature = Buffer.from(asn1.getBytes(), "binary");
    // Create signature block with PKCS#7 data
    const signatureBlock = createSignatureBlock(pkcs7Signature);
    // Append signature block to MCPB file and rewrite it in place
    const signedContent = Buffer.concat([mcpbContent, signatureBlock]);
    writeFileSync(mcpbPath, signedContent);
}
|
||||
/**
 * Verifies a signed MCPB file using OS certificate store.
 *
 * Returns `{ status: "unsigned" }` both for genuinely unsigned files and for
 * any file whose signature, digest, or certificate chain fails to verify —
 * callers cannot distinguish "no signature" from "bad signature" here.
 *
 * @param mcpbPath Path to the signed MCPB file
 * @returns Signature information including verification status
 * @throws If the file cannot be read or parsing fails unexpectedly
 */
export async function verifyMcpbFile(mcpbPath) {
    try {
        const fileContent = readFileSync(mcpbPath);
        // Find and extract signature block
        const { originalContent, pkcs7Signature } = extractSignatureBlock(fileContent);
        if (!pkcs7Signature) {
            return { status: "unsigned" };
        }
        // Parse PKCS#7 signature (forge consumes binary strings)
        const asn1 = forge.asn1.fromDer(pkcs7Signature.toString("binary"));
        const p7Message = forge.pkcs7.messageFromAsn1(asn1);
        // Verify it's signed data and cast to correct type
        if (!("type" in p7Message) ||
            p7Message.type !== forge.pki.oids.signedData) {
            return { status: "unsigned" };
        }
        // Now we know it's PkcsSignedData. The types are incorrect, so we'll
        // fix them there
        const p7 = p7Message;
        // Extract certificates from PKCS#7
        const certificates = p7.certificates || [];
        if (certificates.length === 0) {
            return { status: "unsigned" };
        }
        // Get the signing certificate (first one)
        const signingCert = certificates[0];
        // Verify PKCS#7 signature
        const contentBuf = forge.util.createBuffer(originalContent);
        try {
            p7.verify({ authenticatedAttributes: true });
            // Also verify the content matches: recompute SHA-256 of the
            // payload and compare against the signed messageDigest attribute
            // (required because the signature is detached).
            const signerInfos = p7.signerInfos;
            const signerInfo = signerInfos?.[0];
            if (signerInfo) {
                const md = forge.md.sha256.create();
                md.update(contentBuf.getBytes());
                const digest = md.digest().getBytes();
                // Find the message digest attribute
                let messageDigest = null;
                for (const attr of signerInfo.authenticatedAttributes) {
                    if (attr.type === forge.pki.oids.messageDigest) {
                        messageDigest = attr.value;
                        break;
                    }
                }
                // NOTE(review): both sides are forge binary strings, so a
                // direct !== comparison is the intended equality check here.
                if (!messageDigest || messageDigest !== digest) {
                    return { status: "unsigned" };
                }
            }
        }
        catch (error) {
            // Any verification failure is reported as "unsigned"
            return { status: "unsigned" };
        }
        // Convert forge certificate to PEM for OS verification
        const certPem = forge.pki.certificateToPem(signingCert);
        const intermediatePems = certificates
            .slice(1)
            .map((cert) => Buffer.from(forge.pki.certificateToPem(cert)));
        // Verify certificate chain against OS trust store
        const chainValid = await verifyCertificateChain(Buffer.from(certPem), intermediatePems);
        if (!chainValid) {
            // Signature is valid but certificate is not trusted
            return { status: "unsigned" };
        }
        // Extract certificate info; a matching issuer/subject CN is treated
        // as self-signed
        const isSelfSigned = signingCert.issuer.getField("CN")?.value ===
            signingCert.subject.getField("CN")?.value;
        return {
            status: isSelfSigned ? "self-signed" : "signed",
            publisher: signingCert.subject.getField("CN")?.value || "Unknown",
            issuer: signingCert.issuer.getField("CN")?.value || "Unknown",
            valid_from: signingCert.validity.notBefore.toISOString(),
            valid_to: signingCert.validity.notAfter.toISOString(),
            // SHA-256 fingerprint over the DER-encoded certificate
            fingerprint: forge.md.sha256
                .create()
                .update(forge.asn1.toDer(forge.pki.certificateToAsn1(signingCert)).getBytes())
                .digest()
                .toHex(),
        };
    }
    catch (error) {
        throw new Error(`Failed to verify MCPB file: ${error}`);
    }
}
|
||||
/**
 * Creates a signature block buffer with PKCS#7 signature.
 *
 * Layout: SIGNATURE_HEADER | 4-byte LE signature length | signature bytes |
 * SIGNATURE_FOOTER.
 */
function createSignatureBlock(pkcs7Signature) {
    const header = Buffer.from(SIGNATURE_HEADER, "utf-8");
    const footer = Buffer.from(SIGNATURE_FOOTER, "utf-8");
    // Length prefix lets the reader slice the exact signature span.
    const lengthField = Buffer.alloc(4);
    lengthField.writeUInt32LE(pkcs7Signature.length, 0);
    return Buffer.concat([header, lengthField, pkcs7Signature, footer]);
}
|
||||
/**
 * Extracts the signature block from a signed MCPB file.
 *
 * @param fileContent Full file contents (ZIP payload plus optional block).
 * @returns `{ originalContent }` for unsigned/malformed input, or
 *   `{ originalContent, pkcs7Signature }` when a well-formed block is found.
 */
export function extractSignatureBlock(fileContent) {
    // A signed file ends with the footer marker; its absence means unsigned.
    const footerBytes = Buffer.from(SIGNATURE_FOOTER, "utf-8");
    const footerIndex = fileContent.lastIndexOf(footerBytes);
    if (footerIndex === -1) {
        return { originalContent: fileContent };
    }
    // Scan backwards from the footer for the matching header marker.
    const headerBytes = Buffer.from(SIGNATURE_HEADER, "utf-8");
    let headerIndex = -1;
    for (let i = footerIndex - 1; i >= 0; i--) {
        if (fileContent.slice(i, i + headerBytes.length).equals(headerBytes)) {
            headerIndex = i;
            break;
        }
    }
    if (headerIndex === -1) {
        return { originalContent: fileContent };
    }
    // Everything before the header is the original (unsigned) payload.
    const originalContent = fileContent.slice(0, headerIndex);
    let offset = headerIndex + headerBytes.length;
    try {
        // Block layout: header | u32 LE length | PKCS#7 bytes | footer.
        const sigLength = fileContent.readUInt32LE(offset);
        offset += 4;
        const pkcs7Signature = fileContent.slice(offset, offset + sigLength);
        return {
            originalContent,
            pkcs7Signature,
        };
    }
    catch {
        // Truncated/corrupt block: treat the whole file as unsigned.
        return { originalContent: fileContent };
    }
}
|
||||
/**
 * Verifies certificate chain against OS trust store.
 *
 * Writes the leaf + intermediates to a temp PEM file and shells out to the
 * platform's native verifier with a code-signing purpose:
 * macOS `security verify-cert`, Windows PowerShell X509Chain, Linux
 * `openssl verify`. Returns false on any verification or tooling failure.
 *
 * @param certificate Leaf certificate (PEM, as a Buffer)
 * @param intermediates Optional intermediate certificates (PEM Buffers)
 * @returns {Promise<boolean>} true only if the chain is trusted
 */
export async function verifyCertificateChain(certificate, intermediates) {
    let tempDir = null;
    try {
        // Chain file is written to a fresh private temp dir, cleaned in `finally`.
        tempDir = await mkdtemp(join(tmpdir(), "mcpb-verify-"));
        const certChainPath = join(tempDir, "chain.pem");
        const certChain = [certificate, ...(intermediates || [])].join("\n");
        await writeFile(certChainPath, certChain);
        // Platform-specific verification
        if (process.platform === "darwin") {
            try {
                // "-p codeSign" restricts validation to the code-signing policy.
                await execFileAsync("security", [
                    "verify-cert",
                    "-c",
                    certChainPath,
                    "-p",
                    "codeSign",
                ]);
                return true;
            }
            catch (error) {
                return false;
            }
        }
        else if (process.platform === "win32") {
            // NOTE(review): certChainPath is interpolated into the script; it
            // comes from mkdtemp so it is program-controlled, not user input.
            const psCommand = `
        $ErrorActionPreference = 'Stop'
        $certCollection = New-Object System.Security.Cryptography.X509Certificates.X509Certificate2Collection
        $certCollection.Import('${certChainPath}')

        if ($certCollection.Count -eq 0) {
          Write-Error 'No certificates found'
          exit 1
        }

        $leafCert = $certCollection[0]
        $chain = New-Object System.Security.Cryptography.X509Certificates.X509Chain

        # Enable revocation checking
        $chain.ChainPolicy.RevocationMode = 'Online'
        $chain.ChainPolicy.RevocationFlag = 'EntireChain'
        $chain.ChainPolicy.UrlRetrievalTimeout = New-TimeSpan -Seconds 30

        # Add code signing application policy
        $codeSignOid = New-Object System.Security.Cryptography.Oid '1.3.6.1.5.5.7.3.3'
        $chain.ChainPolicy.ApplicationPolicy.Add($codeSignOid)

        # Add intermediate certificates to extra store
        for ($i = 1; $i -lt $certCollection.Count; $i++) {
          [void]$chain.ChainPolicy.ExtraStore.Add($certCollection[$i])
        }

        # Build and validate chain
        $result = $chain.Build($leafCert)

        if ($result) {
          'Valid'
        } else {
          $chain.ChainStatus | ForEach-Object {
            Write-Error "$($_.Status): $($_.StatusInformation)"
          }
          exit 1
        }
      `.trim();
            const { stdout } = await execFileAsync("powershell.exe", [
                "-NoProfile",
                "-NonInteractive",
                "-Command",
                psCommand,
            ]);
            return stdout.includes("Valid");
        }
        else {
            // Linux: Use openssl with the distro's default CA directory.
            try {
                await execFileAsync("openssl", [
                    "verify",
                    "-purpose",
                    "codesigning",
                    "-CApath",
                    "/etc/ssl/certs",
                    certChainPath,
                ]);
                return true;
            }
            catch (error) {
                return false;
            }
        }
    }
    catch (error) {
        // Temp-file or process-spawn failures count as "not trusted".
        return false;
    }
    finally {
        if (tempDir) {
            try {
                await rm(tempDir, { recursive: true, force: true });
            }
            catch {
                // Ignore cleanup errors
            }
        }
    }
}
|
||||
/**
 * Removes signature from a MCPB file.
 *
 * Rewrites the file in place with only the original (pre-signature) content;
 * a file with no signature block is rewritten unchanged.
 */
export function unsignMcpbFile(mcpbPath) {
    const signedContent = readFileSync(mcpbPath);
    const { originalContent } = extractSignatureBlock(signedContent);
    writeFileSync(mcpbPath, originalContent);
}
|
||||
124
extracted-source/node_modules/@anthropic-ai/mcpb/dist/node/validate.js
generated
vendored
Normal file
124
extracted-source/node_modules/@anthropic-ai/mcpb/dist/node/validate.js
generated
vendored
Normal file
@@ -0,0 +1,124 @@
|
||||
import { existsSync, readFileSync, statSync } from "fs";
|
||||
import * as fs from "fs/promises";
|
||||
import { DestroyerOfModules } from "galactus";
|
||||
import * as os from "os";
|
||||
import { join, resolve } from "path";
|
||||
import prettyBytes from "pretty-bytes";
|
||||
import { unpackExtension } from "../cli/unpack.js";
|
||||
import { McpbManifestSchema } from "../schemas.js";
|
||||
import { McpbManifestSchema as LooseMcpbManifestSchema } from "../schemas-loose.js";
|
||||
/**
 * Validate a manifest file against the strict MCPB manifest schema.
 *
 * Accepts either a path to a manifest file or a directory containing
 * `manifest.json`. Results and errors are printed to the console.
 *
 * @param {string} inputPath - Manifest file or directory path.
 * @returns {boolean} true when the manifest parses and validates.
 */
export function validateManifest(inputPath) {
    try {
        const resolvedPath = resolve(inputPath);
        // A directory argument implies its manifest.json.
        const isDirectory = existsSync(resolvedPath) && statSync(resolvedPath).isDirectory();
        const manifestPath = isDirectory
            ? join(resolvedPath, "manifest.json")
            : resolvedPath;
        const manifestData = JSON.parse(readFileSync(manifestPath, "utf-8"));
        const validation = McpbManifestSchema.safeParse(manifestData);
        if (validation.success) {
            console.log("Manifest schema validation passes!");
            return true;
        }
        // Report every schema issue with its dotted path.
        console.log("ERROR: Manifest validation failed:\n");
        for (const issue of validation.error.issues) {
            const path = issue.path.join(".");
            console.log(`  - ${path ? `${path}: ` : ""}${issue.message}`);
        }
        return false;
    }
    catch (error) {
        if (!(error instanceof Error)) {
            console.error("ERROR: Unknown error occurred");
            return false;
        }
        if (error.message.includes("ENOENT")) {
            console.error(`ERROR: File not found: ${inputPath}`);
            // Distinguish "directory without manifest.json" from a bad path.
            if (existsSync(resolve(inputPath)) &&
                statSync(resolve(inputPath)).isDirectory()) {
                console.error(`  (No manifest.json found in directory)`);
            }
        }
        else if (error.message.includes("JSON")) {
            console.error(`ERROR: Invalid JSON in manifest file: ${error.message}`);
        }
        else {
            console.error(`ERROR: Error reading manifest: ${error.message}`);
        }
        return false;
    }
}
|
||||
/**
 * "Clean" an existing .mcpb bundle in place: unpack it to a temp dir,
 * normalize its manifest against the loose schema, prune development
 * dependencies from node_modules, and repack it over the original file.
 *
 * @param {string} inputPath - Path to the .mcpb file; overwritten on success.
 * @throws If the manifest fails even the loose schema, or pruning hits an
 *   unexpected error. The temp directory is always removed.
 */
export async function cleanMcpb(inputPath) {
    const tmpDir = await fs.mkdtemp(resolve(os.tmpdir(), "mcpb-clean-"));
    const mcpbPath = resolve(tmpDir, "in.mcpb");
    const unpackPath = resolve(tmpDir, "out");
    console.log(" -- Cleaning MCPB...");
    try {
        // Work on a copy so the original is untouched until repack succeeds.
        await fs.copyFile(inputPath, mcpbPath);
        console.log(" -- Unpacking MCPB...");
        await unpackExtension({ mcpbPath, silent: true, outputDir: unpackPath });
        const manifestPath = resolve(unpackPath, "manifest.json");
        const originalManifest = await fs.readFile(manifestPath, "utf-8");
        const manifestData = JSON.parse(originalManifest);
        // The loose schema drops/normalizes fields rather than rejecting them.
        const result = LooseMcpbManifestSchema.safeParse(manifestData);
        if (!result.success) {
            throw new Error(`Unrecoverable manifest issues, please run "mcpb validate"`);
        }
        await fs.writeFile(manifestPath, JSON.stringify(result.data, null, 2));
        // Re-read to tell the user whether normalization changed anything.
        if (originalManifest.trim() !==
            (await fs.readFile(manifestPath, "utf8")).trim()) {
            console.log(" -- Update manifest to be valid per MCPB schema");
        }
        else {
            console.log(" -- Manifest already valid per MCPB schema");
        }
        const nodeModulesPath = resolve(unpackPath, "node_modules");
        if (existsSync(nodeModulesPath)) {
            console.log(" -- node_modules found, deleting development dependencies");
            const destroyer = new DestroyerOfModules({
                rootDirectory: unpackPath,
            });
            try {
                await destroyer.destroy();
            }
            catch (error) {
                // If modules have already been deleted in a previous clean, the walker
                // will fail when it can't find required dependencies. This is expected
                // and safe to ignore.
                if (error instanceof Error &&
                    error.message.includes("Failed to locate module")) {
                    console.log(" -- Some modules already removed, skipping remaining cleanup");
                }
                else {
                    throw error;
                }
            }
            console.log(" -- Removed development dependencies from node_modules");
        }
        else {
            console.log(" -- No node_modules, not pruning");
        }
        const before = await fs.stat(inputPath);
        // Dynamic import avoids a static pack.js <-> validate.js import cycle.
        const { packExtension } = await import("../cli/pack.js");
        await packExtension({
            extensionPath: unpackPath,
            outputPath: inputPath,
            silent: true,
        });
        const after = await fs.stat(inputPath);
        console.log("\nClean Complete:");
        console.log("Before:", prettyBytes(before.size));
        console.log("After:", prettyBytes(after.size));
    }
    finally {
        // Always remove the scratch directory, even on failure.
        await fs.rm(tmpDir, {
            recursive: true,
            force: true,
        });
    }
}
|
||||
105
extracted-source/node_modules/@anthropic-ai/mcpb/dist/schemas-loose.js
generated
vendored
Normal file
105
extracted-source/node_modules/@anthropic-ai/mcpb/dist/schemas-loose.js
generated
vendored
Normal file
@@ -0,0 +1,105 @@
|
||||
import * as z from "zod";
|
||||
// Loose (non-strict) variants of the MCPB manifest schemas: unlike
// schemas.js these use z.object / .passthrough, so unknown keys are
// tolerated — used when normalizing existing manifests rather than
// validating new ones.

// Command line used to launch an MCP server process.
export const McpServerConfigSchema = z.object({
    command: z.string(),
    args: z.array(z.string()).optional(),
    env: z.record(z.string(), z.string()).optional(),
});
// Extension author; only the name is required.
export const McpbManifestAuthorSchema = z.object({
    name: z.string(),
    email: z.string().email().optional(),
    url: z.string().url().optional(),
});
// Source repository reference (e.g. type "git" plus URL).
export const McpbManifestRepositorySchema = z.object({
    type: z.string(),
    url: z.string().url(),
});
// Per-platform override: any subset of the server config fields.
export const McpbManifestPlatformOverrideSchema = McpServerConfigSchema.partial();
// Server config plus optional per-platform overrides keyed by platform name.
export const McpbManifestMcpConfigSchema = McpServerConfigSchema.extend({
    platform_overrides: z
        .record(z.string(), McpbManifestPlatformOverrideSchema)
        .optional(),
});
// How the extension's MCP server is run.
export const McpbManifestServerSchema = z.object({
    type: z.enum(["python", "node", "binary"]),
    entry_point: z.string(),
    mcp_config: McpbManifestMcpConfigSchema,
});
// Host/platform/runtime compatibility constraints; passthrough keeps
// unrecognized compatibility keys.
export const McpbManifestCompatibilitySchema = z
    .object({
    claude_desktop: z.string().optional(),
    platforms: z.array(z.enum(["darwin", "win32", "linux"])).optional(),
    runtimes: z
        .object({
        python: z.string().optional(),
        node: z.string().optional(),
    })
        .optional(),
})
    .passthrough();
// Declared MCP tool metadata.
export const McpbManifestToolSchema = z.object({
    name: z.string(),
    description: z.string().optional(),
});
// Declared MCP prompt: name plus its template text.
export const McpbManifestPromptSchema = z.object({
    name: z.string(),
    description: z.string().optional(),
    arguments: z.array(z.string()).optional(),
    text: z.string(),
});
// One user-configurable option surfaced by the host UI.
export const McpbUserConfigurationOptionSchema = z.object({
    type: z.enum(["string", "number", "boolean", "directory", "file"]),
    title: z.string(),
    description: z.string(),
    required: z.boolean().optional(),
    default: z
        .union([z.string(), z.number(), z.boolean(), z.array(z.string())])
        .optional(),
    multiple: z.boolean().optional(),
    sensitive: z.boolean().optional(),
    min: z.number().optional(),
    max: z.number().optional(),
});
// Map of user-config keys to their chosen values.
export const McpbUserConfigValuesSchema = z.record(z.string(), z.union([z.string(), z.number(), z.boolean(), z.array(z.string())]));
// Top-level manifest schema (loose). Requires at least one of the legacy
// dxt_version or the current manifest_version fields.
export const McpbManifestSchema = z
    .object({
    $schema: z.string().optional(),
    dxt_version: z
        .string()
        .optional()
        .describe("@deprecated Use manifest_version instead"),
    manifest_version: z.string().optional(),
    name: z.string(),
    display_name: z.string().optional(),
    version: z.string(),
    description: z.string(),
    long_description: z.string().optional(),
    author: McpbManifestAuthorSchema,
    repository: McpbManifestRepositorySchema.optional(),
    homepage: z.string().url().optional(),
    documentation: z.string().url().optional(),
    support: z.string().url().optional(),
    icon: z.string().optional(),
    screenshots: z.array(z.string()).optional(),
    server: McpbManifestServerSchema,
    tools: z.array(McpbManifestToolSchema).optional(),
    tools_generated: z.boolean().optional(),
    prompts: z.array(McpbManifestPromptSchema).optional(),
    prompts_generated: z.boolean().optional(),
    keywords: z.array(z.string()).optional(),
    license: z.string().optional(),
    compatibility: McpbManifestCompatibilitySchema.optional(),
    user_config: z
        .record(z.string(), McpbUserConfigurationOptionSchema)
        .optional(),
})
    .refine((data) => !!(data.dxt_version || data.manifest_version), {
    message: "Either 'dxt_version' (deprecated) or 'manifest_version' must be provided",
});
// Result shape returned by signature verification.
export const McpbSignatureInfoSchema = z.object({
    status: z.enum(["signed", "unsigned", "self-signed"]),
    publisher: z.string().optional(),
    issuer: z.string().optional(),
    valid_from: z.string().optional(),
    valid_to: z.string().optional(),
    fingerprint: z.string().optional(),
});
|
||||
107
extracted-source/node_modules/@anthropic-ai/mcpb/dist/schemas.js
generated
vendored
Normal file
107
extracted-source/node_modules/@anthropic-ai/mcpb/dist/schemas.js
generated
vendored
Normal file
@@ -0,0 +1,107 @@
|
||||
import * as z from "zod";
|
||||
// Manifest format version written for newly generated manifests.
export const CURRENT_MANIFEST_VERSION = "0.2";
// Launch configuration for an MCP server process (command, args, env).
export const McpServerConfigSchema = z.strictObject({
    command: z.string(),
    args: z.array(z.string()).optional(),
    env: z.record(z.string(), z.string()).optional(),
});
// Extension author; email/url must be well-formed when present.
export const McpbManifestAuthorSchema = z.strictObject({
    name: z.string(),
    email: z.string().email().optional(),
    url: z.string().url().optional(),
});
// Source repository reference (type plus a valid URL).
export const McpbManifestRepositorySchema = z.strictObject({
    type: z.string(),
    url: z.string().url(),
});
// Per-platform override: any subset of the base server config fields.
export const McpbManifestPlatformOverrideSchema = McpServerConfigSchema.partial();
// Base server config plus optional overrides keyed by platform name
// (looked up via process.platform in shared/config.js).
export const McpbManifestMcpConfigSchema = McpServerConfigSchema.extend({
    platform_overrides: z
        .record(z.string(), McpbManifestPlatformOverrideSchema)
        .optional(),
});
// Describes how to run the bundled server: runtime type, entry point,
// and the MCP launch configuration.
export const McpbManifestServerSchema = z.strictObject({
    type: z.enum(["python", "node", "binary"]),
    entry_point: z.string(),
    mcp_config: McpbManifestMcpConfigSchema,
});
|
||||
// Declares which hosts, platforms, and runtime versions an extension supports.
// NOTE(review): .passthrough() after .strictObject() means unknown top-level
// keys are actually allowed here — confirm the strictness is intentional.
export const McpbManifestCompatibilitySchema = z
    .strictObject({
    claude_desktop: z.string().optional(),
    platforms: z.array(z.enum(["darwin", "win32", "linux"])).optional(),
    runtimes: z
        .strictObject({
        python: z.string().optional(),
        node: z.string().optional(),
    })
        .optional(),
})
    .passthrough();
// Metadata for a single tool exposed by the server.
export const McpbManifestToolSchema = z.strictObject({
    name: z.string(),
    description: z.string().optional(),
});
// Metadata for a prompt template bundled with the extension.
export const McpbManifestPromptSchema = z.strictObject({
    name: z.string(),
    description: z.string().optional(),
    arguments: z.array(z.string()).optional(),
    text: z.string(),
});
// One user-configurable option declared in the manifest's user_config map.
export const McpbUserConfigurationOptionSchema = z.strictObject({
    type: z.enum(["string", "number", "boolean", "directory", "file"]),
    title: z.string(),
    description: z.string(),
    required: z.boolean().optional(),
    // Default may be scalar or a string array (for multi-value options).
    default: z
        .union([z.string(), z.number(), z.boolean(), z.array(z.string())])
        .optional(),
    multiple: z.boolean().optional(),
    sensitive: z.boolean().optional(),
    min: z.number().optional(),
    max: z.number().optional(),
});
// Concrete values a user has supplied for the declared options.
export const McpbUserConfigValuesSchema = z.record(z.string(), z.union([z.string(), z.number(), z.boolean(), z.array(z.string())]));
|
||||
// Full MCPB manifest schema. Top-level keys are strict; the refine below
// additionally requires either the legacy dxt_version or manifest_version.
export const McpbManifestSchema = z
    .strictObject({
    $schema: z.string().optional(),
    // Legacy version field kept for backwards compatibility.
    dxt_version: z
        .string()
        .optional()
        .describe("@deprecated Use manifest_version instead"),
    manifest_version: z.string().optional(),
    name: z.string(),
    display_name: z.string().optional(),
    version: z.string(),
    description: z.string(),
    long_description: z.string().optional(),
    author: McpbManifestAuthorSchema,
    repository: McpbManifestRepositorySchema.optional(),
    homepage: z.string().url().optional(),
    documentation: z.string().url().optional(),
    support: z.string().url().optional(),
    icon: z.string().optional(),
    screenshots: z.array(z.string()).optional(),
    server: McpbManifestServerSchema,
    tools: z.array(McpbManifestToolSchema).optional(),
    tools_generated: z.boolean().optional(),
    prompts: z.array(McpbManifestPromptSchema).optional(),
    prompts_generated: z.boolean().optional(),
    keywords: z.array(z.string()).optional(),
    license: z.string().optional(),
    privacy_policies: z.array(z.string()).optional(),
    compatibility: McpbManifestCompatibilitySchema.optional(),
    user_config: z
        .record(z.string(), McpbUserConfigurationOptionSchema)
        .optional(),
})
    .refine((data) => !!(data.dxt_version || data.manifest_version), {
    message: "Either 'dxt_version' (deprecated) or 'manifest_version' must be provided",
});
|
||||
// Code-signing metadata for a packaged extension (strict: no extra keys).
export const McpbSignatureInfoSchema = z.strictObject({
    // Overall verification outcome for the package signature.
    status: z.enum(["signed", "unsigned", "self-signed"]),
    publisher: z.string().optional(),
    issuer: z.string().optional(),
    valid_from: z.string().optional(),
    valid_to: z.string().optional(),
    fingerprint: z.string().optional(),
});
|
||||
157
extracted-source/node_modules/@anthropic-ai/mcpb/dist/shared/config.js
generated
vendored
Normal file
157
extracted-source/node_modules/@anthropic-ai/mcpb/dist/shared/config.js
generated
vendored
Normal file
@@ -0,0 +1,157 @@
|
||||
/**
|
||||
* This file contains utility functions for handling MCPB configuration,
|
||||
* including variable replacement and MCP server configuration generation.
|
||||
*/
|
||||
/**
 * Recursively replaces `${key}` placeholders in any value. Handles strings,
 * arrays (including inline expansion of array-valued user_config variables),
 * and plain objects; all other values pass through unchanged.
 *
 * @param value The value to process
 * @param variables Map of variable name -> replacement (string or string[])
 * @returns The processed value with all known variables replaced
 */
export function replaceVariables(value, variables) {
    if (typeof value === "string") {
        let result = value;
        // Replace all variables in the string
        for (const [key, replacement] of Object.entries(variables)) {
            // Escape regex metacharacters in the key (e.g. the "." in
            // "user_config.foo") so the pattern matches only literally.
            const escapedKey = key.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
            const pattern = new RegExp(`\\$\\{${escapedKey}\\}`, "g");
            // Check if this pattern actually exists in the string
            if (result.match(pattern)) {
                if (Array.isArray(replacement)) {
                    // Arrays can only be expanded in array context (below).
                    console.warn(`Cannot replace ${key} with array value in string context: "${value}"`, { key, replacement });
                }
                else {
                    // Use a function replacer so "$" sequences in the
                    // replacement value (e.g. "$&", "$1") are inserted
                    // literally instead of being treated as substitutions.
                    result = result.replace(pattern, () => replacement);
                }
            }
        }
        return result;
    }
    else if (Array.isArray(value)) {
        // For arrays, we need to handle the special case of array expansion
        const result = [];
        for (const item of value) {
            if (typeof item === "string" &&
                item.match(/^\$\{user_config\.[^}]+\}$/)) {
                // This is a user config variable that might expand to multiple values
                const varName = item.match(/^\$\{([^}]+)\}$/)?.[1];
                if (varName && variables[varName]) {
                    const replacement = variables[varName];
                    if (Array.isArray(replacement)) {
                        // Expand array inline, one element per value
                        result.push(...replacement);
                    }
                    else {
                        result.push(replacement);
                    }
                }
                else {
                    // Variable not found, keep the original placeholder
                    result.push(item);
                }
            }
            else {
                // Recursively process non-variable items
                result.push(replaceVariables(item, variables));
            }
        }
        return result;
    }
    else if (value && typeof value === "object") {
        // Recurse over object values, preserving keys
        const result = {};
        for (const [key, val] of Object.entries(value)) {
            result[key] = replaceVariables(val, variables);
        }
        return result;
    }
    // Numbers, booleans, null, undefined, etc. pass through untouched
    return value;
}
|
||||
/**
 * Builds the concrete MCP server configuration for a manifest: applies any
 * platform_overrides matching the current process.platform, bails out when
 * required user configuration is missing, then substitutes ${...} variables
 * (system dirs, path separator, and user_config.* values) into the config.
 *
 * Declared async for API compatibility; the body performs no awaits.
 *
 * @param options { manifest, extensionPath, systemDirs, userConfig,
 *                  pathSeparator, logger }
 * @returns The resolved config object, or undefined when the manifest has no
 *          server.mcp_config or required configuration is absent.
 */
export async function getMcpConfigForManifest(options) {
    const { manifest, extensionPath, systemDirs, userConfig, pathSeparator, logger, } = options;
    const baseConfig = manifest.server?.mcp_config;
    if (!baseConfig) {
        return undefined;
    }
    // Shallow copy so the manifest's config is never mutated.
    // NOTE(review): the platform_overrides key itself remains on the returned
    // object after merging — confirm downstream consumers ignore it.
    let result = {
        ...baseConfig,
    };
    if (baseConfig.platform_overrides) {
        if (process.platform in baseConfig.platform_overrides) {
            // Override command/args/env wholesale; falsy override fields keep
            // the base value (|| semantics).
            const platformConfig = baseConfig.platform_overrides[process.platform];
            result.command = platformConfig.command || result.command;
            result.args = platformConfig.args || result.args;
            result.env = platformConfig.env || result.env;
        }
    }
    // Check if required configuration is missing
    if (hasRequiredConfigMissing({ manifest, userConfig })) {
        logger?.warn(`Extension ${manifest.name} has missing required configuration, skipping MCP config`);
        return undefined;
    }
    // Base substitution variables; "/" maps ${/} to the path separator.
    const variables = {
        __dirname: extensionPath,
        pathSeparator,
        "/": pathSeparator,
        ...systemDirs,
    };
    // Build merged configuration from defaults and user settings
    const mergedConfig = {};
    // First, add defaults from manifest
    if (manifest.user_config) {
        for (const [key, configOption] of Object.entries(manifest.user_config)) {
            if (configOption.default !== undefined) {
                mergedConfig[key] = configOption.default;
            }
        }
    }
    // Then, override with user settings
    if (userConfig) {
        Object.assign(mergedConfig, userConfig);
    }
    // Add merged configuration variables for substitution
    for (const [key, value] of Object.entries(mergedConfig)) {
        // Convert user config to the format expected by variable substitution
        const userConfigKey = `user_config.${key}`;
        if (Array.isArray(value)) {
            // Keep arrays as arrays for proper expansion
            variables[userConfigKey] = value.map(String);
        }
        else if (typeof value === "boolean") {
            // Convert booleans to "true"/"false" strings as per spec
            variables[userConfigKey] = value ? "true" : "false";
        }
        else {
            // Convert other types to strings
            variables[userConfigKey] = String(value);
        }
    }
    // Replace all variables in the config
    result = replaceVariables(result, variables);
    return result;
}
|
||||
// True when a scalar cannot satisfy a required config entry:
// undefined, null, or the empty string.
function isInvalidSingleValue(value) {
    if (value === undefined) {
        return true;
    }
    if (value === null) {
        return true;
    }
    return value === "";
}
|
||||
/**
 * Check if an extension has missing required configuration.
 * A required option is missing when its value is undefined/null/"" or an
 * array that is empty or contains any such element.
 * @param manifest The extension manifest
 * @param userConfig The user configuration
 * @returns true if required configuration is missing
 */
export function hasRequiredConfigMissing({ manifest, userConfig, }) {
    const options = manifest.user_config;
    if (!options) {
        return false;
    }
    const provided = userConfig || {};
    // Scalar emptiness check (inlined for self-containment).
    const isEmpty = (v) => v === undefined || v === null || v === "";
    for (const [key, option] of Object.entries(options)) {
        if (!option.required) {
            continue;
        }
        const value = provided[key];
        if (isEmpty(value)) {
            return true;
        }
        if (Array.isArray(value) &&
            (value.length === 0 || value.some(isEmpty))) {
            return true;
        }
    }
    return false;
}
|
||||
29
extracted-source/node_modules/@anthropic-ai/mcpb/dist/shared/log.js
generated
vendored
Normal file
29
extracted-source/node_modules/@anthropic-ai/mcpb/dist/shared/log.js
generated
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
/**
 * Creates a console-backed logger. When `silent` is true every method is a
 * no-op; otherwise each method forwards its arguments to the matching
 * console function (log, error, warn, info, debug).
 */
export function getLogger({ silent = false } = {}) {
    const logger = {};
    for (const level of ["log", "error", "warn", "info", "debug"]) {
        logger[level] = (...args) => {
            if (silent) {
                return;
            }
            console[level](...args);
        };
    }
    return logger;
}
|
||||
Reference in New Issue
Block a user