mirror of
https://github.com/tvytlx/ai-agent-deep-dive.git
synced 2026-04-03 15:44:49 +08:00
Remove node_modules from extracted source tree
This commit is contained in:
50
extracted-source/node_modules/@alcalzone/ansi-tokenize/build/ansiCodes.js
generated
vendored
50
extracted-source/node_modules/@alcalzone/ansi-tokenize/build/ansiCodes.js
generated
vendored
@@ -1,50 +0,0 @@
|
||||
import ansiStyles from "ansi-styles";
|
||||
export const ESCAPES = new Set([27, 155]); // \x1b and \x9b
|
||||
export const CSI = "[".codePointAt(0);
|
||||
export const OSC = "]".codePointAt(0);
|
||||
export const endCodesSet = new Set();
|
||||
const endCodesMap = new Map();
|
||||
for (const [start, end] of ansiStyles.codes) {
|
||||
endCodesSet.add(ansiStyles.color.ansi(end));
|
||||
endCodesMap.set(ansiStyles.color.ansi(start), ansiStyles.color.ansi(end));
|
||||
}
|
||||
export const linkStartCodePrefix = "\x1B]8;;";
|
||||
export const linkStartCodePrefixCharCodes = linkStartCodePrefix
|
||||
.split("")
|
||||
.map((char) => char.charCodeAt(0));
|
||||
export const linkCodeSuffix = "\x07";
|
||||
export const linkCodeSuffixCharCode = linkCodeSuffix.charCodeAt(0);
|
||||
export const linkEndCode = `\x1B]8;;${linkCodeSuffix}`;
|
||||
export function getLinkStartCode(url) {
|
||||
return `${linkStartCodePrefix}${url}${linkCodeSuffix}`;
|
||||
}
|
||||
export function getEndCode(code) {
|
||||
if (endCodesSet.has(code))
|
||||
return code;
|
||||
if (endCodesMap.has(code))
|
||||
return endCodesMap.get(code);
|
||||
// We have a few special cases to handle here:
|
||||
// Links:
|
||||
if (code.startsWith(linkStartCodePrefix))
|
||||
return linkEndCode;
|
||||
code = code.slice(2);
|
||||
// 8-bit/24-bit colors:
|
||||
if (code.startsWith("38")) {
|
||||
return ansiStyles.color.close;
|
||||
}
|
||||
else if (code.startsWith("48")) {
|
||||
return ansiStyles.bgColor.close;
|
||||
}
|
||||
// Otherwise find the reset code in the ansi-styles map
|
||||
const ret = ansiStyles.codes.get(parseInt(code, 10));
|
||||
if (ret) {
|
||||
return ansiStyles.color.ansi(ret);
|
||||
}
|
||||
else {
|
||||
return ansiStyles.reset.open;
|
||||
}
|
||||
}
|
||||
export function ansiCodesToString(codes) {
|
||||
return codes.map((code) => code.code).join("");
|
||||
}
|
||||
//# sourceMappingURL=ansiCodes.js.map
|
||||
17
extracted-source/node_modules/@alcalzone/ansi-tokenize/build/diff.js
generated
vendored
17
extracted-source/node_modules/@alcalzone/ansi-tokenize/build/diff.js
generated
vendored
@@ -1,17 +0,0 @@
|
||||
import { undoAnsiCodes } from "./undo.js";
|
||||
/**
|
||||
* Returns the minimum amount of ANSI codes necessary to get from the compound style `from` to `to`.
|
||||
* Both `from` and `to` are expected to be reduced.
|
||||
*/
|
||||
export function diffAnsiCodes(from, to) {
|
||||
const endCodesInTo = new Set(to.map((code) => code.endCode));
|
||||
const startCodesInFrom = new Set(from.map((code) => code.code));
|
||||
return [
|
||||
// Ignore all styles in `from` that are not overwritten or removed by `to`
|
||||
// Disable all styles in `from` that are removed in `to`
|
||||
...undoAnsiCodes(from.filter((code) => !endCodesInTo.has(code.endCode))),
|
||||
// Add all styles in `to` that don't exist in `from`
|
||||
...to.filter((code) => !startCodesInFrom.has(code.code)),
|
||||
];
|
||||
}
|
||||
//# sourceMappingURL=diff.js.map
|
||||
7
extracted-source/node_modules/@alcalzone/ansi-tokenize/build/index.js
generated
vendored
7
extracted-source/node_modules/@alcalzone/ansi-tokenize/build/index.js
generated
vendored
@@ -1,7 +0,0 @@
|
||||
export { ansiCodesToString } from "./ansiCodes.js";
|
||||
export { diffAnsiCodes } from "./diff.js";
|
||||
export { reduceAnsiCodes, reduceAnsiCodesIncremental } from "./reduce.js";
|
||||
export * from "./styledChars.js";
|
||||
export * from "./tokenize.js";
|
||||
export { undoAnsiCodes } from "./undo.js";
|
||||
//# sourceMappingURL=index.js.map
|
||||
38
extracted-source/node_modules/@alcalzone/ansi-tokenize/build/reduce.js
generated
vendored
38
extracted-source/node_modules/@alcalzone/ansi-tokenize/build/reduce.js
generated
vendored
@@ -1,38 +0,0 @@
|
||||
import ansiStyles from "ansi-styles";
|
||||
import { endCodesSet } from "./ansiCodes.js";
|
||||
/** Reduces the given array of ANSI codes to the minimum necessary to render with the same style */
|
||||
export function reduceAnsiCodes(codes) {
|
||||
return reduceAnsiCodesIncremental([], codes);
|
||||
}
|
||||
/** Like {@link reduceAnsiCodes}, but assumes that `codes` is already reduced. Further reductions are only done for the items in `newCodes`. */
|
||||
export function reduceAnsiCodesIncremental(codes, newCodes) {
|
||||
let ret = [...codes];
|
||||
for (const code of newCodes) {
|
||||
if (code.code === ansiStyles.reset.open) {
|
||||
// Reset code, disable all codes
|
||||
ret = [];
|
||||
}
|
||||
else if (endCodesSet.has(code.code)) {
|
||||
// This is an end code, disable all matching start codes
|
||||
ret = ret.filter((retCode) => retCode.endCode !== code.code);
|
||||
}
|
||||
else {
|
||||
// This is a start code. Remove codes it "overrides", then add it.
|
||||
// If a new code has the same endCode, it "overrides" existing ones.
|
||||
// Special case: Intensity codes (1m, 2m) can coexist (both end with 22m).
|
||||
const isIntensityCode = code.code === ansiStyles.bold.open || code.code === ansiStyles.dim.open;
|
||||
// Add intensity codes only if not already present
|
||||
if (isIntensityCode) {
|
||||
if (!ret.find((retCode) => retCode.code === code.code && retCode.endCode === code.endCode)) {
|
||||
ret.push(code);
|
||||
}
|
||||
}
|
||||
else {
|
||||
ret = ret.filter((retCode) => retCode.endCode !== code.endCode);
|
||||
ret.push(code);
|
||||
}
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
//# sourceMappingURL=reduce.js.map
|
||||
38
extracted-source/node_modules/@alcalzone/ansi-tokenize/build/styledChars.js
generated
vendored
38
extracted-source/node_modules/@alcalzone/ansi-tokenize/build/styledChars.js
generated
vendored
@@ -1,38 +0,0 @@
|
||||
import { ansiCodesToString } from "./ansiCodes.js";
|
||||
import { diffAnsiCodes } from "./diff.js";
|
||||
import { reduceAnsiCodesIncremental } from "./reduce.js";
|
||||
export function styledCharsFromTokens(tokens) {
|
||||
let codes = [];
|
||||
const ret = [];
|
||||
for (const token of tokens) {
|
||||
if (token.type === "ansi") {
|
||||
codes = reduceAnsiCodesIncremental(codes, [token]);
|
||||
}
|
||||
else if (token.type === "char") {
|
||||
ret.push({
|
||||
...token,
|
||||
styles: [...codes],
|
||||
});
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
export function styledCharsToString(chars) {
|
||||
let ret = "";
|
||||
for (let i = 0; i < chars.length; i++) {
|
||||
const char = chars[i];
|
||||
if (i === 0) {
|
||||
ret += ansiCodesToString(char.styles);
|
||||
}
|
||||
else {
|
||||
ret += ansiCodesToString(diffAnsiCodes(chars[i - 1].styles, char.styles));
|
||||
}
|
||||
ret += char.value;
|
||||
// reset active styles at the end of the string
|
||||
if (i === chars.length - 1) {
|
||||
ret += ansiCodesToString(diffAnsiCodes(char.styles, []));
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
//# sourceMappingURL=styledChars.js.map
|
||||
141
extracted-source/node_modules/@alcalzone/ansi-tokenize/build/tokenize.js
generated
vendored
141
extracted-source/node_modules/@alcalzone/ansi-tokenize/build/tokenize.js
generated
vendored
@@ -1,141 +0,0 @@
|
||||
import isFullwidthCodePoint from "is-fullwidth-code-point";
|
||||
import { CSI, ESCAPES, getEndCode, linkStartCodePrefix, linkStartCodePrefixCharCodes, OSC, } from "./ansiCodes.js";
|
||||
// HOT PATH: Use only basic string/char code operations for maximum performance
|
||||
function parseLinkCode(string, offset) {
|
||||
string = string.slice(offset);
|
||||
for (let index = 1; index < linkStartCodePrefixCharCodes.length; index++) {
|
||||
if (string.charCodeAt(index) !== linkStartCodePrefixCharCodes[index]) {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
// This is a link code (with or without the URL part). Find the end of it.
|
||||
const endIndex = string.indexOf("\x07", linkStartCodePrefix.length);
|
||||
if (endIndex === -1)
|
||||
return undefined;
|
||||
return string.slice(0, endIndex + 1);
|
||||
}
|
||||
const CC_0 = "0".charCodeAt(0);
|
||||
const CC_9 = "9".charCodeAt(0);
|
||||
const CC_SEMI = ";".charCodeAt(0);
|
||||
const CC_M = "m".charCodeAt(0);
|
||||
/**
|
||||
* Scans through the given string and finds the index of the last character of an SGR sequence
|
||||
* like `\x1B[38;2;123;123;123m`. This assumes that the string has been checked to start with `\x1B[`.
|
||||
* Returns -1 if no valid SGR sequence is found.
|
||||
*/
|
||||
function findSGRSequenceEndIndex(str) {
|
||||
for (let index = 2; index < str.length; index++) {
|
||||
const charCode = str.charCodeAt(index);
|
||||
// m marks the end of the SGR sequence
|
||||
if (charCode === CC_M)
|
||||
return index;
|
||||
// Digits and semicolons are valid
|
||||
if (charCode === CC_SEMI)
|
||||
continue;
|
||||
if (charCode >= CC_0 && charCode <= CC_9)
|
||||
continue;
|
||||
// Everything else is invalid
|
||||
break;
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
// HOT PATH: Use only basic string/char code operations for maximum performance
|
||||
function parseSGRSequence(string, offset) {
|
||||
string = string.slice(offset);
|
||||
const endIndex = findSGRSequenceEndIndex(string);
|
||||
if (endIndex === -1)
|
||||
return;
|
||||
return string.slice(0, endIndex + 1);
|
||||
}
|
||||
/**
|
||||
* Splits compound SGR sequences like `\x1B[1;3;31m` into individual components
|
||||
*/
|
||||
function splitCompoundSGRSequences(code) {
|
||||
if (!code.includes(";")) {
|
||||
// Not a compound code
|
||||
return [code];
|
||||
}
|
||||
const codeParts = code
|
||||
// Strip off the escape sequences \x1B[ and m
|
||||
.slice(2, -1)
|
||||
.split(";");
|
||||
const ret = [];
|
||||
for (let i = 0; i < codeParts.length; i++) {
|
||||
const rawCode = codeParts[i];
|
||||
// Keep 8-bit and 24-bit color codes (containing multiple ";") together
|
||||
if (rawCode === "38" || rawCode === "48") {
|
||||
if (i + 2 < codeParts.length && codeParts[i + 1] === "5") {
|
||||
// 8-bit color, followed by another number
|
||||
ret.push(codeParts.slice(i, i + 3).join(";"));
|
||||
i += 2;
|
||||
continue;
|
||||
}
|
||||
else if (i + 4 < codeParts.length && codeParts[i + 1] === "2") {
|
||||
// 24-bit color, followed by three numbers
|
||||
ret.push(codeParts.slice(i, i + 5).join(";"));
|
||||
i += 4;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
// Not a (valid) 8/24-bit color code, push as is
|
||||
ret.push(rawCode);
|
||||
}
|
||||
return ret.map((part) => `\x1b[${part}m`);
|
||||
}
|
||||
export function tokenize(str, endChar = Number.POSITIVE_INFINITY) {
|
||||
const ret = [];
|
||||
let index = 0;
|
||||
let visible = 0;
|
||||
while (index < str.length) {
|
||||
const codePoint = str.codePointAt(index);
|
||||
if (ESCAPES.has(codePoint)) {
|
||||
let code;
|
||||
// Peek the next code point to determine the type of ANSI sequence
|
||||
const nextCodePoint = str.codePointAt(index + 1);
|
||||
if (nextCodePoint === OSC) {
|
||||
// ] = operating system commands, like links
|
||||
code = parseLinkCode(str, index);
|
||||
if (code) {
|
||||
ret.push({
|
||||
type: "ansi",
|
||||
code: code,
|
||||
endCode: getEndCode(code),
|
||||
});
|
||||
}
|
||||
}
|
||||
else if (nextCodePoint === CSI) {
|
||||
// [ = control sequence introducer, like SGR sequences [...m
|
||||
code = parseSGRSequence(str, index);
|
||||
if (code) {
|
||||
// Split compound codes into individual tokens
|
||||
const codes = splitCompoundSGRSequences(code);
|
||||
for (const individualCode of codes) {
|
||||
ret.push({
|
||||
type: "ansi",
|
||||
code: individualCode,
|
||||
endCode: getEndCode(individualCode),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
if (code) {
|
||||
index += code.length;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
const fullWidth = isFullwidthCodePoint(codePoint);
|
||||
const character = String.fromCodePoint(codePoint);
|
||||
ret.push({
|
||||
type: "char",
|
||||
value: character,
|
||||
fullWidth,
|
||||
});
|
||||
index += character.length;
|
||||
visible += fullWidth ? 2 : character.length;
|
||||
if (visible >= endChar) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
//# sourceMappingURL=tokenize.js.map
|
||||
11
extracted-source/node_modules/@alcalzone/ansi-tokenize/build/undo.js
generated
vendored
11
extracted-source/node_modules/@alcalzone/ansi-tokenize/build/undo.js
generated
vendored
@@ -1,11 +0,0 @@
|
||||
import { reduceAnsiCodes } from "./reduce.js";
|
||||
/** Returns the combination of ANSI codes needed to undo the given ANSI codes */
|
||||
export function undoAnsiCodes(codes) {
|
||||
return reduceAnsiCodes(codes)
|
||||
.reverse()
|
||||
.map((code) => ({
|
||||
...code,
|
||||
code: code.endCode,
|
||||
}));
|
||||
}
|
||||
//# sourceMappingURL=undo.js.map
|
||||
1126
extracted-source/node_modules/@ant/claude-for-chrome-mcp/src/bridgeClient.ts
generated
vendored
1126
extracted-source/node_modules/@ant/claude-for-chrome-mcp/src/bridgeClient.ts
generated
vendored
File diff suppressed because it is too large
Load Diff
546
extracted-source/node_modules/@ant/claude-for-chrome-mcp/src/browserTools.ts
generated
vendored
546
extracted-source/node_modules/@ant/claude-for-chrome-mcp/src/browserTools.ts
generated
vendored
@@ -1,546 +0,0 @@
|
||||
export const BROWSER_TOOLS = [
|
||||
{
|
||||
name: "javascript_tool",
|
||||
description:
|
||||
"Execute JavaScript code in the context of the current page. The code runs in the page's context and can interact with the DOM, window object, and page variables. Returns the result of the last expression or any thrown errors. If you don't have a valid tab ID, use tabs_context_mcp first to get available tabs.",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
action: {
|
||||
type: "string",
|
||||
description: "Must be set to 'javascript_exec'",
|
||||
},
|
||||
text: {
|
||||
type: "string",
|
||||
description:
|
||||
"The JavaScript code to execute. The code will be evaluated in the page context. The result of the last expression will be returned automatically. Do NOT use 'return' statements - just write the expression you want to evaluate (e.g., 'window.myData.value' not 'return window.myData.value'). You can access and modify the DOM, call page functions, and interact with page variables.",
|
||||
},
|
||||
tabId: {
|
||||
type: "number",
|
||||
description:
|
||||
"Tab ID to execute the code in. Must be a tab in the current group. Use tabs_context_mcp first if you don't have a valid tab ID.",
|
||||
},
|
||||
},
|
||||
required: ["action", "text", "tabId"],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "read_page",
|
||||
description:
|
||||
"Get an accessibility tree representation of elements on the page. By default returns all elements including non-visible ones. Output is limited to 50000 characters by default. If the output exceeds this limit, you will receive an error asking you to specify a smaller depth or focus on a specific element using ref_id. Optionally filter for only interactive elements. If you don't have a valid tab ID, use tabs_context_mcp first to get available tabs.",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
filter: {
|
||||
type: "string",
|
||||
enum: ["interactive", "all"],
|
||||
description:
|
||||
'Filter elements: "interactive" for buttons/links/inputs only, "all" for all elements including non-visible ones (default: all elements)',
|
||||
},
|
||||
tabId: {
|
||||
type: "number",
|
||||
description:
|
||||
"Tab ID to read from. Must be a tab in the current group. Use tabs_context_mcp first if you don't have a valid tab ID.",
|
||||
},
|
||||
depth: {
|
||||
type: "number",
|
||||
description:
|
||||
"Maximum depth of the tree to traverse (default: 15). Use a smaller depth if output is too large.",
|
||||
},
|
||||
ref_id: {
|
||||
type: "string",
|
||||
description:
|
||||
"Reference ID of a parent element to read. Will return the specified element and all its children. Use this to focus on a specific part of the page when output is too large.",
|
||||
},
|
||||
max_chars: {
|
||||
type: "number",
|
||||
description:
|
||||
"Maximum characters for output (default: 50000). Set to a higher value if your client can handle large outputs.",
|
||||
},
|
||||
},
|
||||
required: ["tabId"],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "find",
|
||||
description:
|
||||
'Find elements on the page using natural language. Can search for elements by their purpose (e.g., "search bar", "login button") or by text content (e.g., "organic mango product"). Returns up to 20 matching elements with references that can be used with other tools. If more than 20 matches exist, you\'ll be notified to use a more specific query. If you don\'t have a valid tab ID, use tabs_context_mcp first to get available tabs.',
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
query: {
|
||||
type: "string",
|
||||
description:
|
||||
'Natural language description of what to find (e.g., "search bar", "add to cart button", "product title containing organic")',
|
||||
},
|
||||
tabId: {
|
||||
type: "number",
|
||||
description:
|
||||
"Tab ID to search in. Must be a tab in the current group. Use tabs_context_mcp first if you don't have a valid tab ID.",
|
||||
},
|
||||
},
|
||||
required: ["query", "tabId"],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "form_input",
|
||||
description:
|
||||
"Set values in form elements using element reference ID from the read_page tool. If you don't have a valid tab ID, use tabs_context_mcp first to get available tabs.",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
ref: {
|
||||
type: "string",
|
||||
description:
|
||||
'Element reference ID from the read_page tool (e.g., "ref_1", "ref_2")',
|
||||
},
|
||||
value: {
|
||||
type: ["string", "boolean", "number"],
|
||||
description:
|
||||
"The value to set. For checkboxes use boolean, for selects use option value or text, for other inputs use appropriate string/number",
|
||||
},
|
||||
tabId: {
|
||||
type: "number",
|
||||
description:
|
||||
"Tab ID to set form value in. Must be a tab in the current group. Use tabs_context_mcp first if you don't have a valid tab ID.",
|
||||
},
|
||||
},
|
||||
required: ["ref", "value", "tabId"],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "computer",
|
||||
description: `Use a mouse and keyboard to interact with a web browser, and take screenshots. If you don't have a valid tab ID, use tabs_context_mcp first to get available tabs.\n* Whenever you intend to click on an element like an icon, you should consult a screenshot to determine the coordinates of the element before moving the cursor.\n* If you tried clicking on a program or link but it failed to load, even after waiting, try adjusting your click location so that the tip of the cursor visually falls on the element that you want to click.\n* Make sure to click any buttons, links, icons, etc with the cursor tip in the center of the element. Don't click boxes on their edges unless asked.`,
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
action: {
|
||||
type: "string",
|
||||
enum: [
|
||||
"left_click",
|
||||
"right_click",
|
||||
"type",
|
||||
"screenshot",
|
||||
"wait",
|
||||
"scroll",
|
||||
"key",
|
||||
"left_click_drag",
|
||||
"double_click",
|
||||
"triple_click",
|
||||
"zoom",
|
||||
"scroll_to",
|
||||
"hover",
|
||||
],
|
||||
description:
|
||||
"The action to perform:\n* `left_click`: Click the left mouse button at the specified coordinates.\n* `right_click`: Click the right mouse button at the specified coordinates to open context menus.\n* `double_click`: Double-click the left mouse button at the specified coordinates.\n* `triple_click`: Triple-click the left mouse button at the specified coordinates.\n* `type`: Type a string of text.\n* `screenshot`: Take a screenshot of the screen.\n* `wait`: Wait for a specified number of seconds.\n* `scroll`: Scroll up, down, left, or right at the specified coordinates.\n* `key`: Press a specific keyboard key.\n* `left_click_drag`: Drag from start_coordinate to coordinate.\n* `zoom`: Take a screenshot of a specific region for closer inspection.\n* `scroll_to`: Scroll an element into view using its element reference ID from read_page or find tools.\n* `hover`: Move the mouse cursor to the specified coordinates or element without clicking. Useful for revealing tooltips, dropdown menus, or triggering hover states.",
|
||||
},
|
||||
coordinate: {
|
||||
type: "array",
|
||||
items: { type: "number" },
|
||||
minItems: 2,
|
||||
maxItems: 2,
|
||||
description:
|
||||
"(x, y): The x (pixels from the left edge) and y (pixels from the top edge) coordinates. Required for `left_click`, `right_click`, `double_click`, `triple_click`, and `scroll`. For `left_click_drag`, this is the end position.",
|
||||
},
|
||||
text: {
|
||||
type: "string",
|
||||
description:
|
||||
'The text to type (for `type` action) or the key(s) to press (for `key` action). For `key` action: Provide space-separated keys (e.g., "Backspace Backspace Delete"). Supports keyboard shortcuts using the platform\'s modifier key (use "cmd" on Mac, "ctrl" on Windows/Linux, e.g., "cmd+a" or "ctrl+a" for select all).',
|
||||
},
|
||||
duration: {
|
||||
type: "number",
|
||||
minimum: 0,
|
||||
maximum: 30,
|
||||
description:
|
||||
"The number of seconds to wait. Required for `wait`. Maximum 30 seconds.",
|
||||
},
|
||||
scroll_direction: {
|
||||
type: "string",
|
||||
enum: ["up", "down", "left", "right"],
|
||||
description: "The direction to scroll. Required for `scroll`.",
|
||||
},
|
||||
scroll_amount: {
|
||||
type: "number",
|
||||
minimum: 1,
|
||||
maximum: 10,
|
||||
description:
|
||||
"The number of scroll wheel ticks. Optional for `scroll`, defaults to 3.",
|
||||
},
|
||||
start_coordinate: {
|
||||
type: "array",
|
||||
items: { type: "number" },
|
||||
minItems: 2,
|
||||
maxItems: 2,
|
||||
description:
|
||||
"(x, y): The starting coordinates for `left_click_drag`.",
|
||||
},
|
||||
region: {
|
||||
type: "array",
|
||||
items: { type: "number" },
|
||||
minItems: 4,
|
||||
maxItems: 4,
|
||||
description:
|
||||
"(x0, y0, x1, y1): The rectangular region to capture for `zoom`. Coordinates define a rectangle from top-left (x0, y0) to bottom-right (x1, y1) in pixels from the viewport origin. Required for `zoom` action. Useful for inspecting small UI elements like icons, buttons, or text.",
|
||||
},
|
||||
repeat: {
|
||||
type: "number",
|
||||
minimum: 1,
|
||||
maximum: 100,
|
||||
description:
|
||||
"Number of times to repeat the key sequence. Only applicable for `key` action. Must be a positive integer between 1 and 100. Default is 1. Useful for navigation tasks like pressing arrow keys multiple times.",
|
||||
},
|
||||
ref: {
|
||||
type: "string",
|
||||
description:
|
||||
'Element reference ID from read_page or find tools (e.g., "ref_1", "ref_2"). Required for `scroll_to` action. Can be used as alternative to `coordinate` for click actions.',
|
||||
},
|
||||
modifiers: {
|
||||
type: "string",
|
||||
description:
|
||||
'Modifier keys for click actions. Supports: "ctrl", "shift", "alt", "cmd" (or "meta"), "win" (or "windows"). Can be combined with "+" (e.g., "ctrl+shift", "cmd+alt"). Optional.',
|
||||
},
|
||||
tabId: {
|
||||
type: "number",
|
||||
description:
|
||||
"Tab ID to execute the action on. Must be a tab in the current group. Use tabs_context_mcp first if you don't have a valid tab ID.",
|
||||
},
|
||||
},
|
||||
required: ["action", "tabId"],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "navigate",
|
||||
description:
|
||||
"Navigate to a URL, or go forward/back in browser history. If you don't have a valid tab ID, use tabs_context_mcp first to get available tabs.",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
url: {
|
||||
type: "string",
|
||||
description:
|
||||
'The URL to navigate to. Can be provided with or without protocol (defaults to https://). Use "forward" to go forward in history or "back" to go back in history.',
|
||||
},
|
||||
tabId: {
|
||||
type: "number",
|
||||
description:
|
||||
"Tab ID to navigate. Must be a tab in the current group. Use tabs_context_mcp first if you don't have a valid tab ID.",
|
||||
},
|
||||
},
|
||||
required: ["url", "tabId"],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "resize_window",
|
||||
description:
|
||||
"Resize the current browser window to specified dimensions. Useful for testing responsive designs or setting up specific screen sizes. If you don't have a valid tab ID, use tabs_context_mcp first to get available tabs.",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
width: {
|
||||
type: "number",
|
||||
description: "Target window width in pixels",
|
||||
},
|
||||
height: {
|
||||
type: "number",
|
||||
description: "Target window height in pixels",
|
||||
},
|
||||
tabId: {
|
||||
type: "number",
|
||||
description:
|
||||
"Tab ID to get the window for. Must be a tab in the current group. Use tabs_context_mcp first if you don't have a valid tab ID.",
|
||||
},
|
||||
},
|
||||
required: ["width", "height", "tabId"],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "gif_creator",
|
||||
description:
|
||||
"Manage GIF recording and export for browser automation sessions. Control when to start/stop recording browser actions (clicks, scrolls, navigation), then export as an animated GIF with visual overlays (click indicators, action labels, progress bar, watermark). All operations are scoped to the tab's group. When starting recording, take a screenshot immediately after to capture the initial state as the first frame. When stopping recording, take a screenshot immediately before to capture the final state as the last frame. For export, either provide 'coordinate' to drag/drop upload to a page element, or set 'download: true' to download the GIF.",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
action: {
|
||||
type: "string",
|
||||
enum: ["start_recording", "stop_recording", "export", "clear"],
|
||||
description:
|
||||
"Action to perform: 'start_recording' (begin capturing), 'stop_recording' (stop capturing but keep frames), 'export' (generate and export GIF), 'clear' (discard frames)",
|
||||
},
|
||||
tabId: {
|
||||
type: "number",
|
||||
description:
|
||||
"Tab ID to identify which tab group this operation applies to",
|
||||
},
|
||||
download: {
|
||||
type: "boolean",
|
||||
description:
|
||||
"Always set this to true for the 'export' action only. This causes the gif to be downloaded in the browser.",
|
||||
},
|
||||
filename: {
|
||||
type: "string",
|
||||
description:
|
||||
"Optional filename for exported GIF (default: 'recording-[timestamp].gif'). For 'export' action only.",
|
||||
},
|
||||
options: {
|
||||
type: "object",
|
||||
description:
|
||||
"Optional GIF enhancement options for 'export' action. Properties: showClickIndicators (bool), showDragPaths (bool), showActionLabels (bool), showProgressBar (bool), showWatermark (bool), quality (number 1-30). All default to true except quality (default: 10).",
|
||||
properties: {
|
||||
showClickIndicators: {
|
||||
type: "boolean",
|
||||
description:
|
||||
"Show orange circles at click locations (default: true)",
|
||||
},
|
||||
showDragPaths: {
|
||||
type: "boolean",
|
||||
description: "Show red arrows for drag actions (default: true)",
|
||||
},
|
||||
showActionLabels: {
|
||||
type: "boolean",
|
||||
description:
|
||||
"Show black labels describing actions (default: true)",
|
||||
},
|
||||
showProgressBar: {
|
||||
type: "boolean",
|
||||
description: "Show orange progress bar at bottom (default: true)",
|
||||
},
|
||||
showWatermark: {
|
||||
type: "boolean",
|
||||
description: "Show Claude logo watermark (default: true)",
|
||||
},
|
||||
quality: {
|
||||
type: "number",
|
||||
description:
|
||||
"GIF compression quality, 1-30 (lower = better quality, slower encoding). Default: 10",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
required: ["action", "tabId"],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "upload_image",
|
||||
description:
|
||||
"Upload a previously captured screenshot or user-uploaded image to a file input or drag & drop target. Supports two approaches: (1) ref - for targeting specific elements, especially hidden file inputs, (2) coordinate - for drag & drop to visible locations like Google Docs. Provide either ref or coordinate, not both.",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
imageId: {
|
||||
type: "string",
|
||||
description:
|
||||
"ID of a previously captured screenshot (from the computer tool's screenshot action) or a user-uploaded image",
|
||||
},
|
||||
ref: {
|
||||
type: "string",
|
||||
description:
|
||||
'Element reference ID from read_page or find tools (e.g., "ref_1", "ref_2"). Use this for file inputs (especially hidden ones) or specific elements. Provide either ref or coordinate, not both.',
|
||||
},
|
||||
coordinate: {
|
||||
type: "array",
|
||||
items: {
|
||||
type: "number",
|
||||
},
|
||||
description:
|
||||
"Viewport coordinates [x, y] for drag & drop to a visible location. Use this for drag & drop targets like Google Docs. Provide either ref or coordinate, not both.",
|
||||
},
|
||||
tabId: {
|
||||
type: "number",
|
||||
description:
|
||||
"Tab ID where the target element is located. This is where the image will be uploaded to.",
|
||||
},
|
||||
filename: {
|
||||
type: "string",
|
||||
description:
|
||||
'Optional filename for the uploaded file (default: "image.png")',
|
||||
},
|
||||
},
|
||||
required: ["imageId", "tabId"],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "get_page_text",
|
||||
description:
|
||||
"Extract raw text content from the page, prioritizing article content. Ideal for reading articles, blog posts, or other text-heavy pages. Returns plain text without HTML formatting. If you don't have a valid tab ID, use tabs_context_mcp first to get available tabs.",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
tabId: {
|
||||
type: "number",
|
||||
description:
|
||||
"Tab ID to extract text from. Must be a tab in the current group. Use tabs_context_mcp first if you don't have a valid tab ID.",
|
||||
},
|
||||
},
|
||||
required: ["tabId"],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "tabs_context_mcp",
|
||||
title: "Tabs Context",
|
||||
description:
|
||||
"Get context information about the current MCP tab group. Returns all tab IDs inside the group if it exists. CRITICAL: You must get the context at least once before using other browser automation tools so you know what tabs exist. Each new conversation should create its own new tab (using tabs_create_mcp) rather than reusing existing tabs, unless the user explicitly asks to use an existing tab.",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
createIfEmpty: {
|
||||
type: "boolean",
|
||||
description:
|
||||
"Creates a new MCP tab group if none exists, creates a new Window with a new tab group containing an empty tab (which can be used for this conversation). If a MCP tab group already exists, this parameter has no effect.",
|
||||
},
|
||||
},
|
||||
required: [],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "tabs_create_mcp",
|
||||
title: "Tabs Create",
|
||||
description:
|
||||
"Creates a new empty tab in the MCP tab group. CRITICAL: You must get the context using tabs_context_mcp at least once before using other browser automation tools so you know what tabs exist.",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {},
|
||||
required: [],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "update_plan",
|
||||
description:
|
||||
"Present a plan to the user for approval before taking actions. The user will see the domains you intend to visit and your approach. Once approved, you can proceed with actions on the approved domains without additional permission prompts.",
|
||||
inputSchema: {
|
||||
type: "object" as const,
|
||||
properties: {
|
||||
domains: {
|
||||
type: "array" as const,
|
||||
items: { type: "string" as const },
|
||||
description:
|
||||
"List of domains you will visit (e.g., ['github.com', 'stackoverflow.com']). These domains will be approved for the session when the user accepts the plan.",
|
||||
},
|
||||
approach: {
|
||||
type: "array" as const,
|
||||
items: { type: "string" as const },
|
||||
description:
|
||||
"High-level description of what you will do. Focus on outcomes and key actions, not implementation details. Be concise - aim for 3-7 items.",
|
||||
},
|
||||
},
|
||||
required: ["domains", "approach"],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "read_console_messages",
|
||||
description:
|
||||
"Read browser console messages (console.log, console.error, console.warn, etc.) from a specific tab. Useful for debugging JavaScript errors, viewing application logs, or understanding what's happening in the browser console. Returns console messages from the current domain only. If you don't have a valid tab ID, use tabs_context_mcp first to get available tabs. IMPORTANT: Always provide a pattern to filter messages - without a pattern, you may get too many irrelevant messages.",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
tabId: {
|
||||
type: "number",
|
||||
description:
|
||||
"Tab ID to read console messages from. Must be a tab in the current group. Use tabs_context_mcp first if you don't have a valid tab ID.",
|
||||
},
|
||||
onlyErrors: {
|
||||
type: "boolean",
|
||||
description:
|
||||
"If true, only return error and exception messages. Default is false (return all message types).",
|
||||
},
|
||||
clear: {
|
||||
type: "boolean",
|
||||
description:
|
||||
"If true, clear the console messages after reading to avoid duplicates on subsequent calls. Default is false.",
|
||||
},
|
||||
pattern: {
|
||||
type: "string",
|
||||
description:
|
||||
"Regex pattern to filter console messages. Only messages matching this pattern will be returned (e.g., 'error|warning' to find errors and warnings, 'MyApp' to filter app-specific logs). You should always provide a pattern to avoid getting too many irrelevant messages.",
|
||||
},
|
||||
limit: {
|
||||
type: "number",
|
||||
description:
|
||||
"Maximum number of messages to return. Defaults to 100. Increase only if you need more results.",
|
||||
},
|
||||
},
|
||||
required: ["tabId"],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "read_network_requests",
|
||||
description:
|
||||
"Read HTTP network requests (XHR, Fetch, documents, images, etc.) from a specific tab. Useful for debugging API calls, monitoring network activity, or understanding what requests a page is making. Returns all network requests made by the current page, including cross-origin requests. Requests are automatically cleared when the page navigates to a different domain. If you don't have a valid tab ID, use tabs_context_mcp first to get available tabs.",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
tabId: {
|
||||
type: "number",
|
||||
description:
|
||||
"Tab ID to read network requests from. Must be a tab in the current group. Use tabs_context_mcp first if you don't have a valid tab ID.",
|
||||
},
|
||||
urlPattern: {
|
||||
type: "string",
|
||||
description:
|
||||
"Optional URL pattern to filter requests. Only requests whose URL contains this string will be returned (e.g., '/api/' to filter API calls, 'example.com' to filter by domain).",
|
||||
},
|
||||
clear: {
|
||||
type: "boolean",
|
||||
description:
|
||||
"If true, clear the network requests after reading to avoid duplicates on subsequent calls. Default is false.",
|
||||
},
|
||||
limit: {
|
||||
type: "number",
|
||||
description:
|
||||
"Maximum number of requests to return. Defaults to 100. Increase only if you need more results.",
|
||||
},
|
||||
},
|
||||
required: ["tabId"],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "shortcuts_list",
|
||||
description:
|
||||
"List all available shortcuts and workflows (shortcuts and workflows are interchangeable). Returns shortcuts with their commands, descriptions, and whether they are workflows. Use shortcuts_execute to run a shortcut or workflow.",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
tabId: {
|
||||
type: "number",
|
||||
description:
|
||||
"Tab ID to list shortcuts from. Must be a tab in the current group. Use tabs_context_mcp first if you don't have a valid tab ID.",
|
||||
},
|
||||
},
|
||||
required: ["tabId"],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "shortcuts_execute",
|
||||
description:
|
||||
"Execute a shortcut or workflow by running it in a new sidepanel window using the current tab (shortcuts and workflows are interchangeable). Use shortcuts_list first to see available shortcuts. This starts the execution and returns immediately - it does not wait for completion.",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
tabId: {
|
||||
type: "number",
|
||||
description:
|
||||
"Tab ID to execute the shortcut on. Must be a tab in the current group. Use tabs_context_mcp first if you don't have a valid tab ID.",
|
||||
},
|
||||
shortcutId: {
|
||||
type: "string",
|
||||
description: "The ID of the shortcut to execute",
|
||||
},
|
||||
command: {
|
||||
type: "string",
|
||||
description:
|
||||
"The command name of the shortcut to execute (e.g., 'debug', 'summarize'). Do not include the leading slash.",
|
||||
},
|
||||
},
|
||||
required: ["tabId"],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "switch_browser",
|
||||
description:
|
||||
"Switch which Chrome browser is used for browser automation. Call this when the user wants to connect to a different Chrome browser. Broadcasts a connection request to all Chrome browsers with the extension installed — the user clicks 'Connect' in the desired browser.",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {},
|
||||
required: [],
|
||||
},
|
||||
},
|
||||
];
|
||||
15
extracted-source/node_modules/@ant/claude-for-chrome-mcp/src/index.ts
generated
vendored
15
extracted-source/node_modules/@ant/claude-for-chrome-mcp/src/index.ts
generated
vendored
@@ -1,15 +0,0 @@
|
||||
export { BridgeClient, createBridgeClient } from "./bridgeClient.js";
|
||||
export { BROWSER_TOOLS } from "./browserTools.js";
|
||||
export {
|
||||
createChromeSocketClient,
|
||||
createClaudeForChromeMcpServer,
|
||||
} from "./mcpServer.js";
|
||||
export { localPlatformLabel } from "./types.js";
|
||||
export type {
|
||||
BridgeConfig,
|
||||
ChromeExtensionInfo,
|
||||
ClaudeForChromeContext,
|
||||
Logger,
|
||||
PermissionMode,
|
||||
SocketClient,
|
||||
} from "./types.js";
|
||||
96
extracted-source/node_modules/@ant/claude-for-chrome-mcp/src/mcpServer.ts
generated
vendored
96
extracted-source/node_modules/@ant/claude-for-chrome-mcp/src/mcpServer.ts
generated
vendored
@@ -1,96 +0,0 @@
|
||||
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
|
||||
import type { CallToolResult } from "@modelcontextprotocol/sdk/types.js";
|
||||
import {
|
||||
CallToolRequestSchema,
|
||||
ListToolsRequestSchema,
|
||||
} from "@modelcontextprotocol/sdk/types.js";
|
||||
|
||||
import { createBridgeClient } from "./bridgeClient.js";
|
||||
import { BROWSER_TOOLS } from "./browserTools.js";
|
||||
import { createMcpSocketClient } from "./mcpSocketClient.js";
|
||||
import { createMcpSocketPool } from "./mcpSocketPool.js";
|
||||
import { handleToolCall } from "./toolCalls.js";
|
||||
import type { ClaudeForChromeContext, SocketClient } from "./types.js";
|
||||
|
||||
/**
|
||||
* Create the socket/bridge client for the Chrome extension MCP server.
|
||||
* Exported so Desktop can share a single instance between the registered
|
||||
* MCP server and the InternalMcpServerManager (CCD sessions).
|
||||
*/
|
||||
export function createChromeSocketClient(
|
||||
context: ClaudeForChromeContext,
|
||||
): SocketClient {
|
||||
return context.bridgeConfig
|
||||
? createBridgeClient(context)
|
||||
: context.getSocketPaths
|
||||
? createMcpSocketPool(context)
|
||||
: createMcpSocketClient(context);
|
||||
}
|
||||
|
||||
export function createClaudeForChromeMcpServer(
|
||||
context: ClaudeForChromeContext,
|
||||
existingSocketClient?: SocketClient,
|
||||
): Server {
|
||||
const { serverName, logger } = context;
|
||||
|
||||
// Choose transport: bridge (WebSocket) > socket pool (multi-profile) > single socket.
|
||||
const socketClient =
|
||||
existingSocketClient ?? createChromeSocketClient(context);
|
||||
|
||||
const server = new Server(
|
||||
{
|
||||
name: serverName,
|
||||
version: "1.0.0",
|
||||
},
|
||||
{
|
||||
capabilities: {
|
||||
tools: {},
|
||||
logging: {},
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
server.setRequestHandler(ListToolsRequestSchema, async () => {
|
||||
if (context.isDisabled?.()) {
|
||||
return { tools: [] };
|
||||
}
|
||||
return {
|
||||
tools: context.bridgeConfig
|
||||
? BROWSER_TOOLS
|
||||
: BROWSER_TOOLS.filter((t) => t.name !== "switch_browser"),
|
||||
};
|
||||
});
|
||||
|
||||
server.setRequestHandler(
|
||||
CallToolRequestSchema,
|
||||
async (request): Promise<CallToolResult> => {
|
||||
logger.info(`[${serverName}] Executing tool: ${request.params.name}`);
|
||||
|
||||
return handleToolCall(
|
||||
context,
|
||||
socketClient,
|
||||
request.params.name,
|
||||
request.params.arguments || {},
|
||||
);
|
||||
},
|
||||
);
|
||||
|
||||
socketClient.setNotificationHandler((notification) => {
|
||||
logger.info(
|
||||
`[${serverName}] Forwarding MCP notification: ${notification.method}`,
|
||||
);
|
||||
server
|
||||
.notification({
|
||||
method: notification.method,
|
||||
params: notification.params,
|
||||
})
|
||||
.catch((error) => {
|
||||
// Server may not be connected yet (e.g., during startup or after disconnect)
|
||||
logger.info(
|
||||
`[${serverName}] Failed to forward MCP notification: ${error.message}`,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
return server;
|
||||
}
|
||||
493
extracted-source/node_modules/@ant/claude-for-chrome-mcp/src/mcpSocketClient.ts
generated
vendored
493
extracted-source/node_modules/@ant/claude-for-chrome-mcp/src/mcpSocketClient.ts
generated
vendored
@@ -1,493 +0,0 @@
|
||||
import { promises as fsPromises } from "fs";
|
||||
import { createConnection } from "net";
|
||||
import type { Socket } from "net";
|
||||
import { platform } from "os";
|
||||
import { dirname } from "path";
|
||||
|
||||
import type {
|
||||
ClaudeForChromeContext,
|
||||
PermissionMode,
|
||||
PermissionOverrides,
|
||||
} from "./types.js";
|
||||
|
||||
export class SocketConnectionError extends Error {
|
||||
constructor(message: string) {
|
||||
super(message);
|
||||
this.name = "SocketConnectionError";
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Request written to the native host socket (JSON, 4-byte LE length prefix —
 * see McpSocketClient.sendRequest for the framing).
 */
interface ToolRequest {
  method: string; // "execute_tool"
  params?: {
    client_id?: string; // "desktop" | "claude-code"
    tool?: string;
    args?: Record<string, unknown>;
  };
}

/** Reply to a ToolRequest; carries `result` and/or `error`. */
interface ToolResponse {
  result?: unknown;
  error?: string;
}

/** Unsolicited message pushed by the native host (not tied to a request). */
interface Notification {
  method: string;
  params?: Record<string, unknown>;
}

// Every framed message read off the socket is one of these two shapes.
type SocketMessage = ToolResponse | Notification;
|
||||
|
||||
function isToolResponse(message: SocketMessage): message is ToolResponse {
|
||||
return "result" in message || "error" in message;
|
||||
}
|
||||
|
||||
function isNotification(message: SocketMessage): message is Notification {
|
||||
return "method" in message && typeof message.method === "string";
|
||||
}
|
||||
|
||||
/**
 * Single-connection client for the Chrome native-host Unix socket.
 *
 * Wire protocol (both directions): a 4-byte little-endian length prefix
 * followed by that many bytes of UTF-8 JSON (see sendRequest and the
 * "data" handler in connect()).
 *
 * Reconnection: exponential backoff (factor 1.5, capped at 30s) for the
 * first attempts, then slow polling, giving up after 100 attempts until
 * the next ensureConnected() call restarts the cycle.
 *
 * NOTE(review): only one responseCallback slot exists, so at most one
 * request can be in flight at a time; a concurrent callTool would clobber
 * the pending callback — confirm callers serialize requests.
 */
class McpSocketClient {
  // Active connection, or null when disconnected.
  private socket: Socket | null = null;
  // True once the "connect" event fired and the socket is usable.
  private connected = false;
  // True while a connect() attempt is in progress (guards re-entry).
  private connecting = false;
  // Resolver for the single in-flight request, if any.
  private responseCallback: ((response: ToolResponse) => void) | null = null;
  // Receives unsolicited messages pushed by the native host.
  private notificationHandler: ((notification: Notification) => void) | null =
    null;
  // Accumulates partial frames until a full length-prefixed message arrives.
  private responseBuffer = Buffer.alloc(0);
  private reconnectAttempts = 0;
  // Threshold between verbose backoff logging and quiet slow-poll logging.
  private maxReconnectAttempts = 10;
  // Base delay (ms) for the exponential backoff in scheduleReconnect().
  private reconnectDelay = 1000;
  private reconnectTimer: NodeJS.Timeout | null = null;
  private context: ClaudeForChromeContext;
  // When true, disables automatic reconnection. Used by McpSocketPool which
  // manages reconnection externally by rescanning available sockets.
  public disableAutoReconnect = false;

  constructor(context: ClaudeForChromeContext) {
    this.context = context;
  }

  /**
   * Open (or re-open) the socket: validate socket-file security, connect,
   * and install the connect/data/error/close handlers.
   */
  private async connect(): Promise<void> {
    const { serverName, logger } = this.context;

    if (this.connecting) {
      logger.info(
        `[${serverName}] Already connecting, skipping duplicate attempt`,
      );
      return;
    }

    // Drop any stale socket before dialing a fresh one.
    this.closeSocket();
    this.connecting = true;

    const socketPath =
      this.context.getSocketPath?.() ?? this.context.socketPath;
    logger.info(`[${serverName}] Attempting to connect to: ${socketPath}`);

    try {
      await this.validateSocketSecurity(socketPath);
    } catch (error) {
      this.connecting = false;
      logger.info(`[${serverName}] Security validation failed:`, error);
      // Don't retry on security failures (wrong perms/owner) - those won't
      // self-resolve. Only the error handler retries on transient errors.
      return;
    }

    this.socket = createConnection(socketPath);

    // Timeout the initial connection attempt - if socket file exists but native
    // host is dead, the connect can hang indefinitely
    const connectTimeout = setTimeout(() => {
      if (!this.connected) {
        logger.info(
          `[${serverName}] Connection attempt timed out after 5000ms`,
        );
        this.closeSocket();
        this.scheduleReconnect();
      }
    }, 5000);

    this.socket.on("connect", () => {
      clearTimeout(connectTimeout);
      this.connected = true;
      this.connecting = false;
      // Fresh connection resets the backoff counter.
      this.reconnectAttempts = 0;
      logger.info(`[${serverName}] Successfully connected to bridge server`);
    });

    this.socket.on("data", (data: Buffer) => {
      this.responseBuffer = Buffer.concat([this.responseBuffer, data]);

      // Drain every complete length-prefixed frame currently buffered.
      while (this.responseBuffer.length >= 4) {
        const length = this.responseBuffer.readUInt32LE(0);

        if (this.responseBuffer.length < 4 + length) {
          // Partial frame - wait for more data.
          break;
        }

        const messageBytes = this.responseBuffer.slice(4, 4 + length);
        this.responseBuffer = this.responseBuffer.slice(4 + length);

        try {
          const message = JSON.parse(
            messageBytes.toString("utf-8"),
          ) as SocketMessage;

          if (isNotification(message)) {
            logger.info(
              `[${serverName}] Received notification: ${message.method}`,
            );
            if (this.notificationHandler) {
              this.notificationHandler(message);
            }
          } else if (isToolResponse(message)) {
            // NOTE(review): interpolating the object logs "[object Object]";
            // consider JSON.stringify if message content is wanted in logs.
            logger.info(`[${serverName}] Received tool response: ${message}`);
            this.handleResponse(message);
          } else {
            logger.info(`[${serverName}] Received unknown message: ${message}`);
          }
        } catch (error) {
          // Malformed JSON in one frame is logged and skipped.
          logger.info(`[${serverName}] Failed to parse message:`, error);
        }
      }
    });

    this.socket.on("error", (error: Error & { code?: string }) => {
      clearTimeout(connectTimeout);
      logger.info(`[${serverName}] Socket error (code: ${error.code}):`, error);
      this.connected = false;
      this.connecting = false;

      // Only retry error codes that indicate a transient / recoverable
      // transport failure; anything else waits for the next tool call.
      if (
        error.code &&
        [
          "ECONNREFUSED", // Native host not listening (stale socket)
          "ECONNRESET", // Connection reset by peer
          "EPIPE", // Broken pipe (native host died mid-write)
          "ENOENT", // Socket file was deleted
          "EOPNOTSUPP", // Socket file exists but is not a valid socket
          "ECONNABORTED", // Connection aborted
        ].includes(error.code)
      ) {
        this.scheduleReconnect();
      }
    });

    this.socket.on("close", () => {
      clearTimeout(connectTimeout);
      this.connected = false;
      this.connecting = false;
      this.scheduleReconnect();
    });
  }

  /**
   * Schedule a single future connect() with exponential backoff, unless
   * auto-reconnect is disabled or a retry is already pending.
   */
  private scheduleReconnect(): void {
    const { serverName, logger } = this.context;

    if (this.disableAutoReconnect) {
      return;
    }

    if (this.reconnectTimer) {
      logger.info(`[${serverName}] Reconnect already scheduled, skipping`);
      return;
    }

    this.reconnectAttempts++;

    // Give up after extended polling (~50 min). A new ensureConnected() call
    // from a tool request will restart the cycle if needed.
    const maxTotalAttempts = 100;
    if (this.reconnectAttempts > maxTotalAttempts) {
      logger.info(
        `[${serverName}] Giving up after ${maxTotalAttempts} attempts. Will retry on next tool call.`,
      );
      this.reconnectAttempts = 0;
      return;
    }

    // Use aggressive backoff for first 10 attempts, then slow poll every 30s.
    const delay = Math.min(
      this.reconnectDelay * Math.pow(1.5, this.reconnectAttempts - 1),
      30000,
    );

    if (this.reconnectAttempts <= this.maxReconnectAttempts) {
      logger.info(
        `[${serverName}] Reconnecting in ${Math.round(delay)}ms (attempt ${
          this.reconnectAttempts
        })`,
      );
    } else if (this.reconnectAttempts % 10 === 0) {
      // Log every 10th slow-poll attempt to avoid log spam
      logger.info(
        `[${serverName}] Still polling for native host (attempt ${this.reconnectAttempts})`,
      );
    }

    this.reconnectTimer = setTimeout(() => {
      this.reconnectTimer = null;
      void this.connect();
    }, delay);
  }

  /** Deliver a response to the pending request's callback (at most once). */
  private handleResponse(response: ToolResponse): void {
    if (this.responseCallback) {
      // Clear the slot before invoking so a re-entrant send can register.
      const callback = this.responseCallback;
      this.responseCallback = null;
      callback(response);
    }
  }

  /** Register the sink for unsolicited native-host notifications. */
  public setNotificationHandler(
    handler: (notification: Notification) => void,
  ): void {
    this.notificationHandler = handler;
  }

  /**
   * Ensure the socket is connected, starting a connect attempt if idle.
   *
   * @returns true once connected.
   * @throws SocketConnectionError if not connected within 5000ms.
   */
  public async ensureConnected(): Promise<boolean> {
    const { serverName } = this.context;

    if (this.connected && this.socket) {
      return true;
    }

    if (!this.socket && !this.connecting) {
      await this.connect();
    }

    // Wait for connection with timeout
    return new Promise((resolve, reject) => {
      let checkTimeoutId: NodeJS.Timeout | null = null;

      const timeout = setTimeout(() => {
        if (checkTimeoutId) {
          clearTimeout(checkTimeoutId);
        }
        reject(
          new SocketConnectionError(
            `[${serverName}] Connection attempt timed out after 5000ms`,
          ),
        );
      }, 5000);

      // Poll the connected flag every 500ms until the deadline.
      const checkConnection = () => {
        if (this.connected) {
          clearTimeout(timeout);
          resolve(true);
        } else {
          checkTimeoutId = setTimeout(checkConnection, 500);
        }
      };
      checkConnection();
    });
  }

  /**
   * Write one length-prefixed JSON request and await its response.
   *
   * @param request - Payload to frame and send.
   * @param timeoutMs - Per-request deadline (default 30000).
   * @throws SocketConnectionError when not connected or on timeout.
   */
  private async sendRequest(
    request: ToolRequest,
    timeoutMs = 30000,
  ): Promise<ToolResponse> {
    const { serverName } = this.context;

    if (!this.socket) {
      throw new SocketConnectionError(
        `[${serverName}] Cannot send request: not connected`,
      );
    }

    const socket = this.socket;

    return new Promise((resolve, reject) => {
      const timeout = setTimeout(() => {
        // Abandon the pending callback so a late response is ignored.
        this.responseCallback = null;
        reject(
          new SocketConnectionError(
            `[${serverName}] Tool request timed out after ${timeoutMs}ms`,
          ),
        );
      }, timeoutMs);

      this.responseCallback = (response) => {
        clearTimeout(timeout);
        resolve(response);
      };

      const requestJson = JSON.stringify(request);
      const requestBytes = Buffer.from(requestJson, "utf-8");

      // Frame: 4-byte little-endian byte length, then the JSON body.
      const lengthPrefix = Buffer.allocUnsafe(4);
      lengthPrefix.writeUInt32LE(requestBytes.length, 0);

      const message = Buffer.concat([lengthPrefix, requestBytes]);
      socket.write(message);
    });
  }

  /**
   * Execute a browser tool on the native host.
   *
   * @param name - Tool name to execute.
   * @param args - Tool arguments, forwarded verbatim.
   * @param _permissionOverrides - Unused on this transport (bridge-only).
   */
  public async callTool(
    name: string,
    args: Record<string, unknown>,
    _permissionOverrides?: PermissionOverrides,
  ): Promise<unknown> {
    const request: ToolRequest = {
      method: "execute_tool",
      params: {
        client_id: this.context.clientTypeId,
        tool: name,
        args,
      },
    };

    return this.sendRequestWithRetry(request);
  }

  /**
   * Send a request with automatic retry on connection errors.
   *
   * On connection error or timeout, the native host may be a zombie (connected
   * to dead Chrome). Force reconnect to pick up a fresh native host process
   * and retry once.
   */
  private async sendRequestWithRetry(request: ToolRequest): Promise<unknown> {
    const { serverName, logger } = this.context;

    try {
      return await this.sendRequest(request);
    } catch (error) {
      // Only transport-level failures are retried; other errors propagate.
      if (!(error instanceof SocketConnectionError)) {
        throw error;
      }

      logger.info(
        `[${serverName}] Connection error, forcing reconnect and retrying: ${error.message}`,
      );

      this.closeSocket();
      await this.ensureConnected();

      return await this.sendRequest(request);
    }
  }

  /** No-op on this transport; see comment below. */
  public async setPermissionMode(
    _mode: PermissionMode,
    _allowedDomains?: string[],
  ): Promise<void> {
    // No-op: permission mode is only supported over the bridge (WebSocket) transport
  }

  /** @returns true while the socket is connected and usable. */
  public isConnected(): boolean {
    return this.connected;
  }

  /** Tear down the current socket (if any) and reset connection flags. */
  private closeSocket(): void {
    if (this.socket) {
      // Remove listeners first so close/error handlers don't re-trigger
      // reconnect logic for a socket we are discarding on purpose.
      this.socket.removeAllListeners();
      this.socket.end();
      this.socket.destroy();
      this.socket = null;
    }
    this.connected = false;
    this.connecting = false;
  }

  /** Full reset: cancel pending reconnect, close socket, clear buffers. */
  private cleanup(): void {
    if (this.reconnectTimer) {
      clearTimeout(this.reconnectTimer);
      this.reconnectTimer = null;
    }

    this.closeSocket();
    this.reconnectAttempts = 0;
    this.responseBuffer = Buffer.alloc(0);
    this.responseCallback = null;
  }

  /** Public shutdown entry point. */
  public disconnect(): void {
    this.cleanup();
  }

  /**
   * Defense against socket-file tampering: on POSIX, require the socket
   * (mode 0600) and its dedicated parent directory (mode 0700) to be owned
   * by the current user. A missing socket/directory is tolerated (it will
   * be created by the native host). Skipped entirely on Windows.
   *
   * @throws Error on wrong permissions/ownership or a non-socket path.
   */
  private async validateSocketSecurity(socketPath: string): Promise<void> {
    const { serverName, logger } = this.context;
    if (platform() === "win32") {
      return;
    }
    try {
      // Validate the parent directory permissions if it's the socket directory
      // (not /tmp itself, which has mode 1777 for legacy single-socket paths)
      const dirPath = dirname(socketPath);
      const dirBasename = dirPath.split("/").pop() || "";
      const isSocketDir = dirBasename.startsWith("claude-mcp-browser-bridge-");
      if (isSocketDir) {
        try {
          const dirStats = await fsPromises.stat(dirPath);
          if (dirStats.isDirectory()) {
            const dirMode = dirStats.mode & 0o777;
            if (dirMode !== 0o700) {
              throw new Error(
                `[${serverName}] Insecure socket directory permissions: ${dirMode.toString(
                  8,
                )} (expected 0700). Directory may have been tampered with.`,
              );
            }
            const currentUid = process.getuid?.();
            if (currentUid !== undefined && dirStats.uid !== currentUid) {
              throw new Error(
                `Socket directory not owned by current user (uid: ${currentUid}, dir uid: ${dirStats.uid}). ` +
                  `Potential security risk.`,
              );
            }
          }
        } catch (dirError) {
          if ((dirError as NodeJS.ErrnoException).code !== "ENOENT") {
            throw dirError;
          }
          // Directory doesn't exist yet - native host will create it
        }
      }

      const stats = await fsPromises.stat(socketPath);

      if (!stats.isSocket()) {
        throw new Error(
          `[${serverName}] Path exists but it's not a socket: ${socketPath}`,
        );
      }

      const mode = stats.mode & 0o777;
      if (mode !== 0o600) {
        throw new Error(
          `[${serverName}] Insecure socket permissions: ${mode.toString(
            8,
          )} (expected 0600). Socket may have been tampered with.`,
        );
      }

      const currentUid = process.getuid?.();
      if (currentUid !== undefined && stats.uid !== currentUid) {
        throw new Error(
          `Socket not owned by current user (uid: ${currentUid}, socket uid: ${stats.uid}). ` +
            `Potential security risk.`,
        );
      }

      logger.info(`[${serverName}] Socket security validation passed`);
    } catch (error) {
      if ((error as NodeJS.ErrnoException).code === "ENOENT") {
        logger.info(
          `[${serverName}] Socket not found, will be created by server`,
        );
        return;
      }
      throw error;
    }
  }
}
|
||||
|
||||
export function createMcpSocketClient(
|
||||
context: ClaudeForChromeContext,
|
||||
): McpSocketClient {
|
||||
return new McpSocketClient(context);
|
||||
}
|
||||
|
||||
export type { McpSocketClient };
|
||||
327
extracted-source/node_modules/@ant/claude-for-chrome-mcp/src/mcpSocketPool.ts
generated
vendored
327
extracted-source/node_modules/@ant/claude-for-chrome-mcp/src/mcpSocketPool.ts
generated
vendored
@@ -1,327 +0,0 @@
|
||||
import {
|
||||
createMcpSocketClient,
|
||||
SocketConnectionError,
|
||||
} from "./mcpSocketClient.js";
|
||||
import type { McpSocketClient } from "./mcpSocketClient.js";
|
||||
import type {
|
||||
ClaudeForChromeContext,
|
||||
PermissionMode,
|
||||
PermissionOverrides,
|
||||
} from "./types.js";
|
||||
|
||||
/**
|
||||
* Manages connections to multiple Chrome native host sockets (one per Chrome profile).
|
||||
* Routes tool calls to the correct socket based on tab ID.
|
||||
*
|
||||
* For `tabs_context_mcp`: queries all connected sockets and merges results.
|
||||
* For other tools: routes based on the `tabId` argument using a routing table
|
||||
* built from tabs_context_mcp responses.
|
||||
*/
|
||||
export class McpSocketPool {
|
||||
private clients: Map<string, McpSocketClient> = new Map();
|
||||
private tabRoutes: Map<number, string> = new Map();
|
||||
private context: ClaudeForChromeContext;
|
||||
private notificationHandler:
|
||||
| ((notification: { method: string; params?: Record<string, unknown> }) => void)
|
||||
| null = null;
|
||||
|
||||
constructor(context: ClaudeForChromeContext) {
|
||||
this.context = context;
|
||||
}
|
||||
|
||||
public setNotificationHandler(
|
||||
handler: (notification: {
|
||||
method: string;
|
||||
params?: Record<string, unknown>;
|
||||
}) => void,
|
||||
): void {
|
||||
this.notificationHandler = handler;
|
||||
for (const client of this.clients.values()) {
|
||||
client.setNotificationHandler(handler);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Discover available sockets and ensure at least one is connected.
|
||||
*/
|
||||
public async ensureConnected(): Promise<boolean> {
|
||||
const { logger, serverName } = this.context;
|
||||
|
||||
this.refreshClients();
|
||||
|
||||
// Try to connect any disconnected clients
|
||||
const connectPromises: Promise<boolean>[] = [];
|
||||
for (const client of this.clients.values()) {
|
||||
if (!client.isConnected()) {
|
||||
connectPromises.push(
|
||||
client.ensureConnected().catch(() => false),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (connectPromises.length > 0) {
|
||||
await Promise.all(connectPromises);
|
||||
}
|
||||
|
||||
const connectedCount = this.getConnectedClients().length;
|
||||
if (connectedCount === 0) {
|
||||
logger.info(`[${serverName}] No connected sockets in pool`);
|
||||
return false;
|
||||
}
|
||||
|
||||
logger.info(`[${serverName}] Socket pool: ${connectedCount} connected`);
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Call a tool, routing to the correct socket based on tab ID.
|
||||
* For tabs_context_mcp, queries all sockets and merges results.
|
||||
*/
|
||||
public async callTool(
|
||||
name: string,
|
||||
args: Record<string, unknown>,
|
||||
_permissionOverrides?: PermissionOverrides,
|
||||
): Promise<unknown> {
|
||||
if (name === "tabs_context_mcp") {
|
||||
return this.callTabsContext(args);
|
||||
}
|
||||
|
||||
// Route by tabId if present
|
||||
const tabId = args.tabId as number | undefined;
|
||||
if (tabId !== undefined) {
|
||||
const socketPath = this.tabRoutes.get(tabId);
|
||||
if (socketPath) {
|
||||
const client = this.clients.get(socketPath);
|
||||
if (client?.isConnected()) {
|
||||
return client.callTool(name, args);
|
||||
}
|
||||
}
|
||||
// Tab route not found or client disconnected — fall through to any connected
|
||||
}
|
||||
|
||||
// Fallback: use first connected client
|
||||
const connected = this.getConnectedClients();
|
||||
if (connected.length === 0) {
|
||||
throw new SocketConnectionError(
|
||||
`[${this.context.serverName}] No connected sockets available`,
|
||||
);
|
||||
}
|
||||
return connected[0]!.callTool(name, args);
|
||||
}
|
||||
|
||||
public async setPermissionMode(
|
||||
mode: PermissionMode,
|
||||
allowedDomains?: string[],
|
||||
): Promise<void> {
|
||||
const connected = this.getConnectedClients();
|
||||
await Promise.all(
|
||||
connected.map((client) => client.setPermissionMode(mode, allowedDomains)),
|
||||
);
|
||||
}
|
||||
|
||||
public isConnected(): boolean {
|
||||
return this.getConnectedClients().length > 0;
|
||||
}
|
||||
|
||||
public disconnect(): void {
|
||||
for (const client of this.clients.values()) {
|
||||
client.disconnect();
|
||||
}
|
||||
this.clients.clear();
|
||||
this.tabRoutes.clear();
|
||||
}
|
||||
|
||||
private getConnectedClients(): McpSocketClient[] {
|
||||
return [...this.clients.values()].filter((c) => c.isConnected());
|
||||
}
|
||||
|
||||
/**
|
||||
* Query all connected sockets for tabs and merge results.
|
||||
* Updates the tab routing table.
|
||||
*/
|
||||
private async callTabsContext(
|
||||
args: Record<string, unknown>,
|
||||
): Promise<unknown> {
|
||||
const { logger, serverName } = this.context;
|
||||
const connected = this.getConnectedClients();
|
||||
|
||||
if (connected.length === 0) {
|
||||
throw new SocketConnectionError(
|
||||
`[${serverName}] No connected sockets available`,
|
||||
);
|
||||
}
|
||||
|
||||
// If only one client, skip merging overhead
|
||||
if (connected.length === 1) {
|
||||
const result = await connected[0]!.callTool("tabs_context_mcp", args);
|
||||
this.updateTabRoutes(result, this.getSocketPathForClient(connected[0]!));
|
||||
return result;
|
||||
}
|
||||
|
||||
// Query all connected clients in parallel
|
||||
const results = await Promise.allSettled(
|
||||
connected.map(async (client) => {
|
||||
const result = await client.callTool("tabs_context_mcp", args);
|
||||
const socketPath = this.getSocketPathForClient(client);
|
||||
return { result, socketPath };
|
||||
}),
|
||||
);
|
||||
|
||||
// Merge tab results
|
||||
const mergedTabs: unknown[] = [];
|
||||
this.tabRoutes.clear();
|
||||
|
||||
for (const settledResult of results) {
|
||||
if (settledResult.status !== "fulfilled") {
|
||||
logger.info(
|
||||
`[${serverName}] tabs_context_mcp failed on one socket: ${settledResult.reason}`,
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
const { result, socketPath } = settledResult.value;
|
||||
this.updateTabRoutes(result, socketPath);
|
||||
|
||||
const tabs = this.extractTabs(result);
|
||||
if (tabs) {
|
||||
mergedTabs.push(...tabs);
|
||||
}
|
||||
}
|
||||
|
||||
// Return merged result in the same format as the extension response
|
||||
if (mergedTabs.length > 0) {
|
||||
const tabListText = mergedTabs
|
||||
.map((t) => {
|
||||
const tab = t as { tabId: number; title: string; url: string };
|
||||
return ` • tabId ${tab.tabId}: "${tab.title}" (${tab.url})`;
|
||||
})
|
||||
.join("\n");
|
||||
|
||||
return {
|
||||
result: {
|
||||
content: [
|
||||
{
|
||||
type: "text",
|
||||
text: JSON.stringify({ availableTabs: mergedTabs }),
|
||||
},
|
||||
{
|
||||
type: "text",
|
||||
text: `\n\nTab Context:\n- Available tabs:\n${tabListText}`,
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// Fallback: return first successful result as-is
|
||||
for (const settledResult of results) {
|
||||
if (settledResult.status === "fulfilled") {
|
||||
return settledResult.value.result;
|
||||
}
|
||||
}
|
||||
|
||||
throw new SocketConnectionError(
|
||||
`[${serverName}] All sockets failed for tabs_context_mcp`,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract tab objects from a tool response to update routing table.
|
||||
*/
|
||||
private updateTabRoutes(result: unknown, socketPath: string): void {
|
||||
const tabs = this.extractTabs(result);
|
||||
if (!tabs) return;
|
||||
|
||||
for (const tab of tabs) {
|
||||
if (typeof tab === "object" && tab !== null && "tabId" in tab) {
|
||||
const tabId = (tab as { tabId: number }).tabId;
|
||||
this.tabRoutes.set(tabId, socketPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** Pull the tab array out of a tool response, or null when absent. */
private extractTabs(result: unknown): unknown[] | null {
  if (!result || typeof result !== "object") return null;

  // Response format: { result: { content: [{ type: "text", text: "{\"availableTabs\":[...],\"tabGroupId\":...}" }] } }
  const content = (
    result as {
      result?: { content?: Array<{ type: string; text?: string }> };
    }
  ).result?.content;
  if (!content || !Array.isArray(content)) return null;

  for (const item of content) {
    if (item.type !== "text" || !item.text) continue;
    try {
      const parsed = JSON.parse(item.text);
      if (Array.isArray(parsed)) return parsed;
      // Handle { availableTabs: [...] } format
      if (parsed && Array.isArray(parsed.availableTabs)) {
        return parsed.availableTabs;
      }
    } catch {
      // Not JSON, skip
    }
  }
  return null;
}
|
||||
|
||||
/** Reverse-lookup the socket path a pooled client was registered under ("" if unknown). */
private getSocketPathForClient(client: McpSocketClient): string {
  const match = [...this.clients.entries()].find(([, c]) => c === client);
  return match ? match[0] : "";
}
|
||||
|
||||
/**
|
||||
* Scan for available sockets and create/remove clients as needed.
|
||||
*/
|
||||
private refreshClients(): void {
|
||||
const socketPaths = this.getAvailableSocketPaths();
|
||||
const { logger, serverName } = this.context;
|
||||
|
||||
// Add new clients for newly discovered sockets
|
||||
for (const path of socketPaths) {
|
||||
if (!this.clients.has(path)) {
|
||||
logger.info(`[${serverName}] Adding socket to pool: ${path}`);
|
||||
const clientContext: ClaudeForChromeContext = {
|
||||
...this.context,
|
||||
socketPath: path,
|
||||
getSocketPath: undefined,
|
||||
getSocketPaths: undefined,
|
||||
};
|
||||
const client = createMcpSocketClient(clientContext);
|
||||
client.disableAutoReconnect = true;
|
||||
if (this.notificationHandler) {
|
||||
client.setNotificationHandler(this.notificationHandler);
|
||||
}
|
||||
this.clients.set(path, client);
|
||||
}
|
||||
}
|
||||
|
||||
// Remove clients for sockets that no longer exist
|
||||
for (const [path, client] of this.clients.entries()) {
|
||||
if (!socketPaths.includes(path)) {
|
||||
logger.info(`[${serverName}] Removing stale socket from pool: ${path}`);
|
||||
client.disconnect();
|
||||
this.clients.delete(path);
|
||||
for (const [tabId, socketPath] of this.tabRoutes.entries()) {
|
||||
if (socketPath === path) {
|
||||
this.tabRoutes.delete(tabId);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** Resolve all currently available socket paths; empty when no resolver is configured. */
private getAvailableSocketPaths(): string[] {
  const resolve = this.context.getSocketPaths;
  return resolve ? (resolve.call(this.context) ?? []) : [];
}
|
||||
}
|
||||
|
||||
export function createMcpSocketPool(
|
||||
context: ClaudeForChromeContext,
|
||||
): McpSocketPool {
|
||||
return new McpSocketPool(context);
|
||||
}
|
||||
301
extracted-source/node_modules/@ant/claude-for-chrome-mcp/src/toolCalls.ts
generated
vendored
301
extracted-source/node_modules/@ant/claude-for-chrome-mcp/src/toolCalls.ts
generated
vendored
@@ -1,301 +0,0 @@
|
||||
import type { CallToolResult } from "@modelcontextprotocol/sdk/types.js";
|
||||
|
||||
import { SocketConnectionError } from "./mcpSocketClient.js";
|
||||
import type {
|
||||
ClaudeForChromeContext,
|
||||
PermissionMode,
|
||||
PermissionOverrides,
|
||||
SocketClient,
|
||||
} from "./types.js";
|
||||
|
||||
export const handleToolCall = async (
|
||||
context: ClaudeForChromeContext,
|
||||
socketClient: SocketClient,
|
||||
name: string,
|
||||
args: Record<string, unknown>,
|
||||
permissionOverrides?: PermissionOverrides,
|
||||
): Promise<CallToolResult> => {
|
||||
// Handle permission mode changes locally (not forwarded to extension)
|
||||
if (name === "set_permission_mode") {
|
||||
return handleSetPermissionMode(socketClient, args);
|
||||
}
|
||||
|
||||
// Handle switch_browser outside the normal tool call flow (manages its own connection)
|
||||
if (name === "switch_browser") {
|
||||
return handleSwitchBrowser(context, socketClient);
|
||||
}
|
||||
|
||||
try {
|
||||
const isConnected = await socketClient.ensureConnected();
|
||||
|
||||
context.logger.silly(
|
||||
`[${context.serverName}] Server is connected: ${isConnected}. Received tool call: ${name} with args: ${JSON.stringify(args)}.`,
|
||||
);
|
||||
|
||||
if (isConnected) {
|
||||
return await handleToolCallConnected(
|
||||
context,
|
||||
socketClient,
|
||||
name,
|
||||
args,
|
||||
permissionOverrides,
|
||||
);
|
||||
}
|
||||
|
||||
return handleToolCallDisconnected(context);
|
||||
} catch (error) {
|
||||
context.logger.info(`[${context.serverName}] Error calling tool:`, error);
|
||||
|
||||
if (error instanceof SocketConnectionError) {
|
||||
return handleToolCallDisconnected(context);
|
||||
}
|
||||
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: "text",
|
||||
text: `Error calling tool, please try again. : ${error instanceof Error ? error.message : String(error)}`,
|
||||
},
|
||||
],
|
||||
isError: true,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
async function handleToolCallConnected(
|
||||
context: ClaudeForChromeContext,
|
||||
socketClient: SocketClient,
|
||||
name: string,
|
||||
args: Record<string, unknown>,
|
||||
permissionOverrides?: PermissionOverrides,
|
||||
): Promise<CallToolResult> {
|
||||
const response = await socketClient.callTool(name, args, permissionOverrides);
|
||||
|
||||
context.logger.silly(
|
||||
`[${context.serverName}] Received result from socket bridge: ${JSON.stringify(response)}`,
|
||||
);
|
||||
|
||||
if (response === null || response === undefined) {
|
||||
return {
|
||||
content: [{ type: "text", text: "Tool execution completed" }],
|
||||
};
|
||||
}
|
||||
|
||||
// Response will have either result or error field
|
||||
const { result, error } = response as {
|
||||
result?: { content: unknown[] | string };
|
||||
error?: { content: unknown[] | string };
|
||||
};
|
||||
|
||||
// Determine which field has the content and whether it's an error
|
||||
const contentData = error || result;
|
||||
const isError = !!error;
|
||||
|
||||
if (!contentData) {
|
||||
return {
|
||||
content: [{ type: "text", text: "Tool execution completed" }],
|
||||
};
|
||||
}
|
||||
|
||||
if (isError && isAuthenticationError(contentData.content)) {
|
||||
context.onAuthenticationError();
|
||||
}
|
||||
|
||||
const { content } = contentData;
|
||||
|
||||
if (content && Array.isArray(content)) {
|
||||
if (isError) {
|
||||
return {
|
||||
content: content.map((item: unknown) => {
|
||||
if (typeof item === "object" && item !== null && "type" in item) {
|
||||
return item;
|
||||
}
|
||||
|
||||
return { type: "text", text: String(item) };
|
||||
}),
|
||||
isError: true,
|
||||
} as CallToolResult;
|
||||
}
|
||||
|
||||
const convertedContent = content.map((item: unknown) => {
|
||||
if (
|
||||
typeof item === "object" &&
|
||||
item !== null &&
|
||||
"type" in item &&
|
||||
"source" in item
|
||||
) {
|
||||
const typedItem = item;
|
||||
if (
|
||||
typedItem.type === "image" &&
|
||||
typeof typedItem.source === "object" &&
|
||||
typedItem.source !== null &&
|
||||
"data" in typedItem.source
|
||||
) {
|
||||
return {
|
||||
type: "image",
|
||||
data: typedItem.source.data,
|
||||
mimeType:
|
||||
"media_type" in typedItem.source
|
||||
? typedItem.source.media_type || "image/png"
|
||||
: "image/png",
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (typeof item === "object" && item !== null && "type" in item) {
|
||||
return item;
|
||||
}
|
||||
|
||||
return { type: "text", text: String(item) };
|
||||
});
|
||||
|
||||
return {
|
||||
content: convertedContent,
|
||||
isError,
|
||||
} as CallToolResult;
|
||||
}
|
||||
|
||||
// Handle string content
|
||||
if (typeof content === "string") {
|
||||
return {
|
||||
content: [{ type: "text", text: content }],
|
||||
isError,
|
||||
} as CallToolResult;
|
||||
}
|
||||
|
||||
// Fallback for unexpected result format
|
||||
context.logger.warn(
|
||||
`[${context.serverName}] Unexpected result format from socket bridge`,
|
||||
response,
|
||||
);
|
||||
|
||||
return {
|
||||
content: [{ type: "text", text: JSON.stringify(response) }],
|
||||
isError,
|
||||
};
|
||||
}
|
||||
|
||||
function handleToolCallDisconnected(
|
||||
context: ClaudeForChromeContext,
|
||||
): CallToolResult {
|
||||
const text = context.onToolCallDisconnected();
|
||||
return {
|
||||
content: [{ type: "text", text }],
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle set_permission_mode tool call locally.
|
||||
* This is security-sensitive as it controls whether permission prompts are shown.
|
||||
*/
|
||||
async function handleSetPermissionMode(
|
||||
socketClient: SocketClient,
|
||||
args: Record<string, unknown>,
|
||||
): Promise<CallToolResult> {
|
||||
// Validate permission mode at runtime
|
||||
const validModes = [
|
||||
"ask",
|
||||
"skip_all_permission_checks",
|
||||
"follow_a_plan",
|
||||
] as const;
|
||||
const mode = args.mode as string | undefined;
|
||||
const permissionMode: PermissionMode =
|
||||
mode && validModes.includes(mode as PermissionMode)
|
||||
? (mode as PermissionMode)
|
||||
: "ask";
|
||||
|
||||
if (socketClient.setPermissionMode) {
|
||||
await socketClient.setPermissionMode(
|
||||
permissionMode,
|
||||
args.allowed_domains as string[] | undefined,
|
||||
);
|
||||
}
|
||||
|
||||
return {
|
||||
content: [
|
||||
{ type: "text", text: `Permission mode set to: ${permissionMode}` },
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle switch_browser tool call. Broadcasts a pairing request and blocks
|
||||
* until a browser responds or timeout.
|
||||
*/
|
||||
async function handleSwitchBrowser(
|
||||
context: ClaudeForChromeContext,
|
||||
socketClient: SocketClient,
|
||||
): Promise<CallToolResult> {
|
||||
if (!context.bridgeConfig) {
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: "text",
|
||||
text: "Browser switching is only available with bridge connections.",
|
||||
},
|
||||
],
|
||||
isError: true,
|
||||
};
|
||||
}
|
||||
|
||||
const isConnected = await socketClient.ensureConnected();
|
||||
if (!isConnected) {
|
||||
return handleToolCallDisconnected(context);
|
||||
}
|
||||
|
||||
const result = (await socketClient.switchBrowser?.()) ?? null;
|
||||
|
||||
if (result === "no_other_browsers") {
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: "text",
|
||||
text: "No other browsers available to switch to. Open Chrome with the Claude extension in another browser to switch.",
|
||||
},
|
||||
],
|
||||
isError: true,
|
||||
};
|
||||
}
|
||||
|
||||
if (result) {
|
||||
return {
|
||||
content: [
|
||||
{ type: "text", text: `Connected to browser "${result.name}".` },
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: "text",
|
||||
text: "No browser responded within the timeout. Make sure Chrome is open with the Claude extension installed, then try again.",
|
||||
},
|
||||
],
|
||||
isError: true,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the error content indicates an authentication issue
|
||||
*/
|
||||
function isAuthenticationError(content: unknown[] | string): boolean {
|
||||
const errorText = Array.isArray(content)
|
||||
? content
|
||||
.map((item) => {
|
||||
if (typeof item === "string") return item;
|
||||
if (
|
||||
typeof item === "object" &&
|
||||
item !== null &&
|
||||
"text" in item &&
|
||||
typeof item.text === "string"
|
||||
) {
|
||||
return item.text;
|
||||
}
|
||||
return "";
|
||||
})
|
||||
.join(" ")
|
||||
: String(content);
|
||||
|
||||
return errorText.toLowerCase().includes("re-authenticated");
|
||||
}
|
||||
134
extracted-source/node_modules/@ant/claude-for-chrome-mcp/src/types.ts
generated
vendored
134
extracted-source/node_modules/@ant/claude-for-chrome-mcp/src/types.ts
generated
vendored
@@ -1,134 +0,0 @@
|
||||
export interface Logger {
|
||||
info: (message: string, ...args: unknown[]) => void;
|
||||
error: (message: string, ...args: unknown[]) => void;
|
||||
warn: (message: string, ...args: unknown[]) => void;
|
||||
debug: (message: string, ...args: unknown[]) => void;
|
||||
silly: (message: string, ...args: unknown[]) => void;
|
||||
}
|
||||
|
||||
export type PermissionMode =
|
||||
| "ask"
|
||||
| "skip_all_permission_checks"
|
||||
| "follow_a_plan";
|
||||
|
||||
export interface BridgeConfig {
|
||||
/** Bridge WebSocket base URL (e.g., wss://bridge.claudeusercontent.com) */
|
||||
url: string;
|
||||
/** Returns the user's account UUID for the connection path */
|
||||
getUserId: () => Promise<string | undefined>;
|
||||
/** Returns a valid OAuth token for bridge authentication */
|
||||
getOAuthToken: () => Promise<string | undefined>;
|
||||
/** Optional dev user ID for local development (bypasses OAuth) */
|
||||
devUserId?: string;
|
||||
}
|
||||
|
||||
/** Metadata about a connected Chrome extension instance. */
|
||||
export interface ChromeExtensionInfo {
|
||||
deviceId: string;
|
||||
osPlatform?: string;
|
||||
connectedAt: number;
|
||||
name?: string;
|
||||
}
|
||||
|
||||
export interface ClaudeForChromeContext {
|
||||
serverName: string;
|
||||
logger: Logger;
|
||||
socketPath: string;
|
||||
// Optional dynamic resolver for socket path. When provided, called on each
|
||||
// connection attempt to handle runtime conditions (e.g., TMPDIR mismatch).
|
||||
getSocketPath?: () => string;
|
||||
// Optional resolver returning all available socket paths (for multi-profile support).
|
||||
// When provided, a socket pool connects to all sockets and routes by tab ID.
|
||||
getSocketPaths?: () => string[];
|
||||
clientTypeId: string; // "desktop" | "claude-code"
|
||||
onToolCallDisconnected: () => string;
|
||||
onAuthenticationError: () => void;
|
||||
isDisabled?: () => boolean;
|
||||
/** Bridge WebSocket configuration. When provided, uses bridge instead of socket. */
|
||||
bridgeConfig?: BridgeConfig;
|
||||
/** If set, permission mode is sent to the extension immediately on bridge connection. */
|
||||
initialPermissionMode?: PermissionMode;
|
||||
/** Optional callback to track telemetry events for bridge connections */
|
||||
trackEvent?: <K extends string>(
|
||||
eventName: K,
|
||||
metadata: Record<string, unknown> | null,
|
||||
) => void;
|
||||
/** Called when user pairs with an extension via the browser pairing flow. */
|
||||
onExtensionPaired?: (deviceId: string, name: string) => void;
|
||||
/** Returns the previously paired deviceId, if any. */
|
||||
getPersistedDeviceId?: () => string | undefined;
|
||||
/** Called when a remote extension is auto-selected (only option available). */
|
||||
onRemoteExtensionWarning?: (ext: ChromeExtensionInfo) => void;
|
||||
}
|
||||
|
||||
/**
|
||||
* Map Node's process.platform to the platform string reported by Chrome extensions
|
||||
* via navigator.userAgentData.platform.
|
||||
*/
|
||||
export function localPlatformLabel(): string {
|
||||
return process.platform === "darwin"
|
||||
? "macOS"
|
||||
: process.platform === "win32"
|
||||
? "Windows"
|
||||
: "Linux";
|
||||
}
|
||||
|
||||
/** Permission request forwarded from the extension to the desktop for user approval. */
|
||||
export interface BridgePermissionRequest {
|
||||
/** Links to the pending tool_call */
|
||||
toolUseId: string;
|
||||
/** Unique ID for this permission request */
|
||||
requestId: string;
|
||||
/** Tool type, e.g. "navigate", "click", "execute_javascript" */
|
||||
toolType: string;
|
||||
/** The URL/domain context */
|
||||
url: string;
|
||||
/** Additional action data (click coordinates, text, etc.) */
|
||||
actionData?: Record<string, unknown>;
|
||||
}
|
||||
|
||||
/** Desktop response to a bridge permission request. */
|
||||
export interface BridgePermissionResponse {
|
||||
requestId: string;
|
||||
allowed: boolean;
|
||||
}
|
||||
|
||||
/** Per-call permission overrides, allowing each session to use its own permission state. */
|
||||
export interface PermissionOverrides {
|
||||
permissionMode: PermissionMode;
|
||||
allowedDomains?: string[];
|
||||
/** Callback invoked when the extension requests user permission via the bridge. */
|
||||
onPermissionRequest?: (request: BridgePermissionRequest) => Promise<boolean>;
|
||||
}
|
||||
|
||||
/** Shared interface for McpSocketClient and McpSocketPool */
|
||||
export interface SocketClient {
|
||||
ensureConnected(): Promise<boolean>;
|
||||
callTool(
|
||||
name: string,
|
||||
args: Record<string, unknown>,
|
||||
permissionOverrides?: PermissionOverrides,
|
||||
): Promise<unknown>;
|
||||
isConnected(): boolean;
|
||||
disconnect(): void;
|
||||
setNotificationHandler(
|
||||
handler: (notification: {
|
||||
method: string;
|
||||
params?: Record<string, unknown>;
|
||||
}) => void,
|
||||
): void;
|
||||
/** Set permission mode for the current session. Only effective on BridgeClient. */
|
||||
setPermissionMode?(
|
||||
mode: PermissionMode,
|
||||
allowedDomains?: string[],
|
||||
): Promise<void>;
|
||||
/** Switch to a different browser. Only available on BridgeClient. */
|
||||
switchBrowser?(): Promise<
|
||||
| {
|
||||
deviceId: string;
|
||||
name: string;
|
||||
}
|
||||
| "no_other_browsers"
|
||||
| null
|
||||
>;
|
||||
}
|
||||
25
extracted-source/node_modules/@ant/computer-use-input/js/index.js
generated
vendored
25
extracted-source/node_modules/@ant/computer-use-input/js/index.js
generated
vendored
@@ -1,25 +0,0 @@
|
||||
const path = require("path");

// Discriminated union: { isSupported: false } on non-darwin,
// { isSupported: true, ...nativeFns } on darwin. Cross-platform consumers
// (claude-cli-internal) require() unconditionally and narrow on isSupported.
if (process.platform === "darwin") {
  // COMPUTER_USE_INPUT_NODE_PATH: escape hatch for bundlers. Bun's --compile
  // embeds the .node as an asset, not in a node_modules tree — __dirname is
  // the exe dir and ../prebuilds/ doesn't exist. The consuming build bakes
  // this var to the embedded asset's path. Unset → normal node_modules layout.
  //
  // key()/keys() dispatch enigo work onto DispatchQueue.main via
  // dispatch2::run_on_main, then block a tokio worker on a channel. Under
  // Electron (CFRunLoop drains the main queue) this works; under libuv
  // (Node/bun) the main queue never drains and the promise hangs. Consumers
  // running under libuv must pump CFRunLoop while key()/keys() are pending —
  // e.g. claude-cli-internal borrows @ant/computer-use-swift's _drainMainRunLoop.
  const nativeModulePath =
    process.env.COMPUTER_USE_INPUT_NODE_PATH ??
    path.resolve(__dirname, "../prebuilds/computer-use-input.node");
  const native = require(nativeModulePath);
  module.exports = { isSupported: true, ...native };
} else {
  module.exports = { isSupported: false };
}
|
||||
553
extracted-source/node_modules/@ant/computer-use-mcp/src/deniedApps.ts
generated
vendored
553
extracted-source/node_modules/@ant/computer-use-mcp/src/deniedApps.ts
generated
vendored
@@ -1,553 +0,0 @@
|
||||
/**
|
||||
* App category lookup for tiered CU permissions. Three categories land at a
|
||||
* restricted tier instead of `"full"`:
|
||||
*
|
||||
* - **browser** → `"read"` tier — visible in screenshots, NO interaction.
|
||||
* The model can read an already-open page but must use the Claude-in-Chrome
|
||||
* MCP for navigation/clicking/typing.
|
||||
* - **terminal** → `"click"` tier — visible + clickable, NO typing. The
|
||||
* model can click a Run button or scroll test output in an IDE, but can't
|
||||
* type into the integrated terminal. Use the Bash tool for shell work.
|
||||
* - **trading** → `"read"` tier — same restrictions as browsers, but no
|
||||
* CiC-MCP alternative exists. For platforms where a stray click can
|
||||
* execute a trade or send a message to a counterparty.
|
||||
*
|
||||
* Uncategorized apps default to `"full"`. See `getDefaultTierForApp`.
|
||||
*
|
||||
* Identification is two-layered:
|
||||
* 1. Bundle ID match (macOS-only; `InstalledApp.bundleId` is a
|
||||
* CFBundleIdentifier and meaningless on Windows). Fast, exact, the
|
||||
* primary mechanism while CU is darwin-gated.
|
||||
* 2. Display-name substring match (cross-platform fallback). Catches
|
||||
* unresolved requests ("Chrome" when Chrome isn't installed) AND will
|
||||
* be the primary mechanism on Windows/Linux where there's no bundle ID.
|
||||
* Windows-relevant names (PowerShell, cmd, Windows Terminal) are
|
||||
* included now so they activate the moment the darwin gate lifts.
|
||||
*
|
||||
* Keep this file **import-free** (like sentinelApps.ts) — the renderer may
|
||||
* import it via a package.json subpath export, and pulling in
|
||||
* `@modelcontextprotocol/sdk` (a devDep) through the index → mcpServer chain
|
||||
* would fail module resolution in Next.js. The `CuAppPermTier` type is
|
||||
* duplicated as a string literal below rather than imported.
|
||||
*/
|
||||
|
||||
export type DeniedCategory = "browser" | "terminal" | "trading";
|
||||
|
||||
/**
|
||||
* Map a category to its hardcoded tier. Return-type is the string-literal
|
||||
* union inline (this file is import-free; see header comment). The
|
||||
* authoritative type is `CuAppPermTier` in types.ts — keep in sync.
|
||||
*
|
||||
* Not bijective — both `"browser"` and `"trading"` map to `"read"`. Copy
|
||||
* that differs by category (the "use CiC" hint is browser-only) must check
|
||||
* the category, not just the tier.
|
||||
*/
|
||||
export function categoryToTier(
|
||||
category: DeniedCategory | null,
|
||||
): "read" | "click" | "full" {
|
||||
if (category === "browser" || category === "trading") return "read";
|
||||
if (category === "terminal") return "click";
|
||||
return "full";
|
||||
}
|
||||
|
||||
// ─── Bundle-ID deny sets (macOS) ─────────────────────────────────────────
|
||||
|
||||
const BROWSER_BUNDLE_IDS: ReadonlySet<string> = new Set([
|
||||
// Apple
|
||||
"com.apple.Safari",
|
||||
"com.apple.SafariTechnologyPreview",
|
||||
// Google
|
||||
"com.google.Chrome",
|
||||
"com.google.Chrome.beta",
|
||||
"com.google.Chrome.dev",
|
||||
"com.google.Chrome.canary",
|
||||
// Microsoft
|
||||
"com.microsoft.edgemac",
|
||||
"com.microsoft.edgemac.Beta",
|
||||
"com.microsoft.edgemac.Dev",
|
||||
"com.microsoft.edgemac.Canary",
|
||||
// Mozilla
|
||||
"org.mozilla.firefox",
|
||||
"org.mozilla.firefoxdeveloperedition",
|
||||
"org.mozilla.nightly",
|
||||
// Chromium-based
|
||||
"org.chromium.Chromium",
|
||||
"com.brave.Browser",
|
||||
"com.brave.Browser.beta",
|
||||
"com.brave.Browser.nightly",
|
||||
"com.operasoftware.Opera",
|
||||
"com.operasoftware.OperaGX",
|
||||
"com.operasoftware.OperaDeveloper",
|
||||
"com.vivaldi.Vivaldi",
|
||||
// The Browser Company
|
||||
"company.thebrowser.Browser", // Arc
|
||||
"company.thebrowser.dia", // Dia (agentic)
|
||||
// Privacy-focused
|
||||
"org.torproject.torbrowser",
|
||||
"com.duckduckgo.macos.browser",
|
||||
"ru.yandex.desktop.yandex-browser",
|
||||
// Agentic / AI browsers — newer entrants with LLM integrations
|
||||
"ai.perplexity.comet",
|
||||
"com.sigmaos.sigmaos.macos", // SigmaOS
|
||||
// Webkit-based misc
|
||||
"com.kagi.kagimacOS", // Orion
|
||||
]);
|
||||
|
||||
/**
|
||||
* Terminals + IDEs with integrated terminals. Supersets
|
||||
* `SHELL_ACCESS_BUNDLE_IDS` from sentinelApps.ts — terminals proceed to the
|
||||
* approval dialog at tier "click", and the sentinel warning renders
|
||||
* alongside the tier badge.
|
||||
*/
|
||||
const TERMINAL_BUNDLE_IDS: ReadonlySet<string> = new Set([
|
||||
// Dedicated terminals
|
||||
"com.apple.Terminal",
|
||||
"com.googlecode.iterm2",
|
||||
"dev.warp.Warp-Stable",
|
||||
"dev.warp.Warp-Beta",
|
||||
"com.github.wez.wezterm",
|
||||
"org.alacritty",
|
||||
"io.alacritty", // pre-v0.11.0 (renamed 2022-07) — kept for legacy installs
|
||||
"net.kovidgoyal.kitty",
|
||||
"co.zeit.hyper",
|
||||
"com.mitchellh.ghostty",
|
||||
"org.tabby",
|
||||
"com.termius-dmg.mac", // Termius
|
||||
// IDEs with integrated terminals — we can't distinguish "type in the
|
||||
// editor" from "type in the integrated terminal" via screenshot+click.
|
||||
// VS Code family
|
||||
"com.microsoft.VSCode",
|
||||
"com.microsoft.VSCodeInsiders",
|
||||
"com.vscodium", // VSCodium
|
||||
"com.todesktop.230313mzl4w4u92", // Cursor
|
||||
"com.exafunction.windsurf", // Windsurf / Codeium
|
||||
"dev.zed.Zed",
|
||||
"dev.zed.Zed-Preview",
|
||||
// JetBrains family (all have integrated terminals)
|
||||
"com.jetbrains.intellij",
|
||||
"com.jetbrains.intellij.ce",
|
||||
"com.jetbrains.pycharm",
|
||||
"com.jetbrains.pycharm.ce",
|
||||
"com.jetbrains.WebStorm",
|
||||
"com.jetbrains.CLion",
|
||||
"com.jetbrains.goland",
|
||||
"com.jetbrains.rubymine",
|
||||
"com.jetbrains.PhpStorm",
|
||||
"com.jetbrains.datagrip",
|
||||
"com.jetbrains.rider",
|
||||
"com.jetbrains.AppCode",
|
||||
"com.jetbrains.rustrover",
|
||||
"com.jetbrains.fleet",
|
||||
"com.google.android.studio", // Android Studio (JetBrains-based)
|
||||
// Other IDEs
|
||||
"com.axosoft.gitkraken", // GitKraken has an integrated terminal panel. Also keeps the "kraken" trading-substring from miscategorizing it — bundle-ID wins.
|
||||
"com.sublimetext.4",
|
||||
"com.sublimetext.3",
|
||||
"org.vim.MacVim",
|
||||
"com.neovim.neovim",
|
||||
"org.gnu.Emacs",
|
||||
// Xcode's previous carve-out (full tier for Interface Builder / simulator)
|
||||
// was reversed — at tier "click" IB and simulator taps still work (both are
|
||||
// plain clicks) while the integrated terminal is blocked from keyboard input.
|
||||
"com.apple.dt.Xcode",
|
||||
"org.eclipse.platform.ide",
|
||||
"org.netbeans.ide",
|
||||
"com.microsoft.visual-studio", // Visual Studio for Mac
|
||||
// AppleScript/automation execution surfaces — same threat as terminals:
|
||||
// type(script) → key("cmd+r") runs arbitrary code. Added after #28011
|
||||
// removed the osascript MCP server, making CU the only tool-call route
|
||||
// to AppleScript.
|
||||
"com.apple.ScriptEditor2",
|
||||
"com.apple.Automator",
|
||||
"com.apple.shortcuts",
|
||||
]);
|
||||
|
||||
/**
|
||||
* Trading / crypto platforms — granted at tier `"read"` so the agent can see
|
||||
* balances and prices but can't click into an order, transfer, or IB chat.
|
||||
* Bundle IDs populated from Homebrew cask `uninstall.quit` stanzas as they're
|
||||
* verified; the name-substring fallback below is the primary check. Bloomberg
|
||||
* Terminal has no native macOS build per their FAQ (web/Citrix only).
|
||||
*
|
||||
* Budgeting/accounting apps (Quicken, YNAB, QuickBooks, etc.) are NOT listed
|
||||
* here — they default to tier `"full"`. The risk model for brokerage/crypto
|
||||
* (a stray click can execute a trade) doesn't apply to budgeting apps; the
|
||||
* Cowork system prompt carries the soft instruction to never execute trades
|
||||
* or transfer money on the user's behalf.
|
||||
*/
|
||||
const TRADING_BUNDLE_IDS: ReadonlySet<string> = new Set([
|
||||
// Verified via Homebrew quit/zap stanzas + mdls + electron-builder source.
|
||||
// Trading
|
||||
"com.webull.desktop.v1", // Webull (direct download, Qt)
|
||||
"com.webull.trade.mac.v1", // Webull (Mac App Store)
|
||||
"com.tastytrade.desktop",
|
||||
"com.tradingview.tradingviewapp.desktop",
|
||||
"com.fidelity.activetrader", // Fidelity Trader+ (new)
|
||||
"com.fmr.activetrader", // Fidelity Active Trader Pro (legacy)
|
||||
// Interactive Brokers TWS — install4j wrapper; Homebrew quit stanza is
|
||||
// authoritative for this exact value but install4j IDs can drift across
|
||||
// major versions — name-substring "trader workstation" is the fallback.
|
||||
"com.install4j.5889-6375-8446-2021",
|
||||
// Crypto
|
||||
"com.binance.BinanceDesktop",
|
||||
"com.electron.exodus",
|
||||
// Electrum uses PyInstaller with bundle_identifier=None → defaults to
|
||||
// org.pythonmac.unspecified.<AppName>. Confirmed in spesmilo/electrum
|
||||
// source + Homebrew zap. IntuneBrew's "org.electrum.electrum" is a fork.
|
||||
"org.pythonmac.unspecified.Electrum",
|
||||
"com.ledger.live",
|
||||
"io.trezor.TrezorSuite",
|
||||
// No native macOS app (name-substring only): Schwab, E*TRADE, TradeStation,
|
||||
// Robinhood, NinjaTrader, Coinbase, Kraken, Bloomberg. thinkorswim
|
||||
// install4j ID drifts per-install — substring safer.
|
||||
]);
|
||||
|
||||
// ─── Policy-deny (not a tier — cannot be granted at all) ─────────────────
|
||||
//
|
||||
// Streaming / ebook / music apps and a handful of publisher apps. These
|
||||
// are auto-denied before the approval dialog — no tier can be granted.
|
||||
// Rationale is copyright / content-control (the agent has no legitimate
|
||||
// need to screenshot Netflix or click Play on Spotify).
|
||||
//
|
||||
// Sourced from the ACP CU-apps blocklist xlsx ("Full block" tab). See
|
||||
// /tmp/extract_cu_blocklist.py for the extraction script.
|
||||
|
||||
const POLICY_DENIED_BUNDLE_IDS: ReadonlySet<string> = new Set([
|
||||
// Verified via Homebrew quit/zap + mdls /System/Applications + IntuneBrew.
|
||||
// Apple built-ins
|
||||
"com.apple.TV",
|
||||
"com.apple.Music",
|
||||
"com.apple.iBooksX",
|
||||
"com.apple.podcasts",
|
||||
// Music
|
||||
"com.spotify.client",
|
||||
"com.amazon.music",
|
||||
"com.tidal.desktop",
|
||||
"com.deezer.deezer-desktop",
|
||||
"com.pandora.desktop",
|
||||
"com.electron.pocket-casts", // direct-download Electron wrapper
|
||||
"au.com.shiftyjelly.PocketCasts", // Mac App Store
|
||||
// Video
|
||||
"tv.plex.desktop",
|
||||
"tv.plex.htpc",
|
||||
"tv.plex.plexamp",
|
||||
"com.amazon.aiv.AIVApp", // Prime Video (iOS-on-Apple-Silicon)
|
||||
// Ebooks
|
||||
"net.kovidgoyal.calibre",
|
||||
"com.amazon.Kindle", // legacy desktop, discontinued
|
||||
"com.amazon.Lassen", // current Mac App Store (iOS-on-Mac)
|
||||
"com.kobo.desktop.Kobo",
|
||||
// No native macOS app (name-substring only): Netflix, Disney+, Hulu,
|
||||
// HBO Max, Peacock, Paramount+, YouTube, Crunchyroll, Tubi, Vudu,
|
||||
// Audible, Reddit, NYTimes. Their iOS apps don't opt into iPad-on-Mac.
|
||||
]);
|
||||
|
||||
/**
 * Lowercase display-name fragments that trigger the policy deny when no
 * bundle ID matched. Checked via `.includes()` on the lowercased name, so
 * every entry must be specific enough not to match unrelated apps.
 */
const POLICY_DENIED_NAME_SUBSTRINGS: readonly string[] = [
  // Video streaming
  "netflix",
  "disney+",
  "hulu",
  "prime video",
  "apple tv",
  "peacock",
  "paramount+",
  // "plex" is too generic — would match "Perplexity". Covered by
  // tv.plex.* bundle IDs on macOS.
  "tubi",
  "crunchyroll",
  "vudu",
  // E-readers / audiobooks
  "kindle",
  "apple books",
  "kobo",
  "play books",
  "calibre",
  "libby",
  "readium",
  "audible",
  "libro.fm",
  "speechify",
  // Music
  "spotify",
  "apple music",
  "amazon music",
  "youtube music",
  "tidal",
  "deezer",
  "pandora",
  "pocket casts",
  // Publisher / social apps (from the same blocklist tab)
  "naver",
  "reddit",
  "sony music",
  "vegas pro",
  "pitchfork",
  "economist",
  "nytimes",
  // Skipped (too generic for substring matching — need bundle ID):
  // HBO Max / Max, YouTube (non-Music), Nook, Sony Catalyst, Wired
];
|
||||
|
||||
/**
|
||||
* Policy-level auto-deny. Unlike `userDeniedBundleIds` (per-user Settings
|
||||
* page), this is baked into the build. `buildAccessRequest` strips these
|
||||
* before the approval dialog with "blocked by policy" guidance; the agent
|
||||
* is told to not retry.
|
||||
*/
|
||||
export function isPolicyDenied(
|
||||
bundleId: string | undefined,
|
||||
displayName: string,
|
||||
): boolean {
|
||||
if (bundleId && POLICY_DENIED_BUNDLE_IDS.has(bundleId)) return true;
|
||||
const lower = displayName.toLowerCase();
|
||||
for (const sub of POLICY_DENIED_NAME_SUBSTRINGS) {
|
||||
if (lower.includes(sub)) return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
export function getDeniedCategory(bundleId: string): DeniedCategory | null {
|
||||
if (BROWSER_BUNDLE_IDS.has(bundleId)) return "browser";
|
||||
if (TERMINAL_BUNDLE_IDS.has(bundleId)) return "terminal";
|
||||
if (TRADING_BUNDLE_IDS.has(bundleId)) return "trading";
|
||||
return null;
|
||||
}
|
||||
|
||||
// ─── Display-name fallback (cross-platform) ──────────────────────────────
|
||||
|
||||
/**
 * Lowercase substrings checked against the requested display name. Catches:
 * - Unresolved requests (app not installed, Spotlight miss)
 * - Future Windows/Linux support where bundleId is meaningless
 *
 * Matched via `.includes()` on `name.toLowerCase()`. Entries are ordered
 * by specificity (more-specific first is irrelevant since we return on
 * first match, but groupings are by category for readability).
 */
const BROWSER_NAME_SUBSTRINGS: readonly string[] = [
  "safari",
  "chrome",
  "firefox",
  "microsoft edge",
  "brave",
  "opera",
  "vivaldi",
  "chromium",
  // Arc/Dia: the canonical display name is just "Arc"/"Dia" — too short for
  // substring matching (false-positives: "Arcade", "Diagram"). Covered by
  // bundle ID on macOS. The "... browser" entries below catch natural-language
  // phrasings ("the arc browser") but NOT the canonical short name.
  "arc browser",
  "tor browser",
  "duckduckgo",
  "yandex",
  "orion browser",
  // Agentic / AI browsers
  "comet", // Perplexity's browser — "Comet" substring risks false positives
  // but leaving for now; "comet" in an app name is rare
  "sigmaos",
  "dia browser",
];
|
||||
|
||||
/**
 * Terminal / shell / IDE display-name fragments → "terminal" category.
 * Same `.includes()`-on-lowercased-name matching as the browser list.
 */
const TERMINAL_NAME_SUBSTRINGS: readonly string[] = [
  // macOS / cross-platform terminals
  "terminal", // catches Terminal, Windows Terminal (NOT iTerm — separate entry)
  "iterm",
  "wezterm",
  "alacritty",
  "kitty",
  "ghostty",
  "tabby",
  "termius",
  // AppleScript runners — see bundle-ID comment above. "shortcuts" is too
  // generic for substring matching (many apps have "shortcuts" in the name);
  // covered by bundle ID only, like warp/hyper.
  "script editor",
  "automator",
  // NOTE: "warp" and "hyper" are too generic for substring matching —
  // they'd false-positive on "Warpaint" or "Hyperion". Covered by bundle ID
  // (dev.warp.Warp-Stable, co.zeit.hyper) for macOS; Windows exe-name
  // matching can be added when Windows CU ships.
  // Windows shells (activate when the darwin gate lifts)
  "powershell",
  "cmd.exe",
  "command prompt",
  "git bash",
  "conemu",
  "cmder",
  // IDEs (VS Code family)
  "visual studio code",
  "visual studio", // catches VS for Mac + Windows
  "vscode",
  "vs code",
  "vscodium",
  "cursor", // Cursor IDE — "cursor" is generic but IDE is the only common app
  "windsurf",
  // Zed: display name is just "Zed" — too short for substring matching
  // (false-positives). Covered by bundle ID (dev.zed.Zed) on macOS.
  // IDEs (JetBrains family)
  "intellij",
  "pycharm",
  "webstorm",
  "clion",
  "goland",
  "rubymine",
  "phpstorm",
  "datagrip",
  "rider",
  "appcode",
  "rustrover",
  "fleet",
  "android studio",
  // Other IDEs
  "sublime text",
  "macvim",
  "neovim",
  "emacs",
  "xcode",
  "eclipse",
  "netbeans",
];
|
||||
|
||||
/**
 * Brokerage / crypto display-name fragments → "trading" category.
 * Checked FIRST in getDeniedCategoryByDisplayName (see comment there).
 */
const TRADING_NAME_SUBSTRINGS: readonly string[] = [
  // Trading — brokerage apps. Sourced from the ACP CU-apps blocklist xlsx
  // ("Read Only" tab). Name-substring safe for proper nouns below; generic
  // names (IG, Delta, HTX) are skipped and need bundle-ID matching once
  // verified.
  "bloomberg",
  "ameritrade",
  "thinkorswim",
  "schwab",
  "fidelity",
  "e*trade",
  "interactive brokers",
  "trader workstation", // Interactive Brokers TWS
  "tradestation",
  "webull",
  "robinhood",
  "tastytrade",
  "ninjatrader",
  "tradingview",
  "moomoo",
  "tradezero",
  "prorealtime",
  "plus500",
  "saxotrader",
  "oanda",
  "metatrader",
  "forex.com",
  "avaoptions",
  "ctrader",
  "jforex",
  "iq option",
  "olymp trade",
  "binomo",
  "pocket option",
  "raceoption",
  "expertoption",
  "quotex",
  "naga",
  "morgan stanley",
  "ubs neo",
  "eikon", // Thomson Reuters / LSEG Workspace
  // Crypto — exchanges, wallets, portfolio trackers
  "coinbase",
  "kraken",
  "binance",
  "okx",
  "bybit",
  // "gate.io" is too generic — the ".io" TLD suffix is common in app names
  // (e.g., "Draw.io"). Needs bundle-ID matching once verified.
  "phemex",
  "stormgain",
  "crypto.com",
  // "exodus" is too generic — it's a common noun and would match unrelated
  // apps/games. Needs bundle-ID matching once verified.
  "electrum",
  "ledger live",
  "trezor",
  "guarda",
  "atomic wallet",
  "bitpay",
  "bisq",
  "koinly",
  "cointracker",
  "blockfi",
  "stripe cli",
  // Crypto games / metaverse (same trade-execution risk model)
  "decentraland",
  "axie infinity",
  "gods unchained",
];
|
||||
|
||||
/**
|
||||
* Display-name substring match. Called when bundle-ID resolution returned
|
||||
* nothing (`resolved === undefined`) or when no bundle-ID deny-list entry
|
||||
* matched. Returns the category for the first matching substring, or null.
|
||||
*
|
||||
* Case-insensitive, substring — so `"Google Chrome"`, `"chrome"`, and
|
||||
* `"Chrome Canary"` all match the `"chrome"` entry.
|
||||
*/
|
||||
export function getDeniedCategoryByDisplayName(
|
||||
name: string,
|
||||
): DeniedCategory | null {
|
||||
const lower = name.toLowerCase();
|
||||
// Trading first — proper-noun-only set, most specific. "Bloomberg Terminal"
|
||||
// contains "terminal" and would miscategorize if TERMINAL_NAME_SUBSTRINGS
|
||||
// ran first.
|
||||
for (const sub of TRADING_NAME_SUBSTRINGS) {
|
||||
if (lower.includes(sub)) return "trading";
|
||||
}
|
||||
for (const sub of BROWSER_NAME_SUBSTRINGS) {
|
||||
if (lower.includes(sub)) return "browser";
|
||||
}
|
||||
for (const sub of TERMINAL_NAME_SUBSTRINGS) {
|
||||
if (lower.includes(sub)) return "terminal";
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Combined check — bundle ID first (exact, fast), then display-name
|
||||
* fallback. This is the function tool-call handlers should use.
|
||||
*
|
||||
* `bundleId` may be undefined (unresolved request — model asked for an app
|
||||
* that isn't installed or Spotlight didn't find). In that case only the
|
||||
* display-name check runs.
|
||||
*/
|
||||
export function getDeniedCategoryForApp(
|
||||
bundleId: string | undefined,
|
||||
displayName: string,
|
||||
): DeniedCategory | null {
|
||||
if (bundleId) {
|
||||
const byId = getDeniedCategory(bundleId);
|
||||
if (byId) return byId;
|
||||
}
|
||||
return getDeniedCategoryByDisplayName(displayName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Default tier for an app at grant time. Wraps `getDeniedCategoryForApp` +
|
||||
* `categoryToTier`. Browsers → `"read"`, terminals/IDEs → `"click"`,
|
||||
* everything else → `"full"`.
|
||||
*
|
||||
* Called by `buildAccessRequest` to populate `ResolvedAppRequest.proposedTier`
|
||||
* before the approval dialog shows.
|
||||
*/
|
||||
export function getDefaultTierForApp(
|
||||
bundleId: string | undefined,
|
||||
displayName: string,
|
||||
): "read" | "click" | "full" {
|
||||
return categoryToTier(getDeniedCategoryForApp(bundleId, displayName));
|
||||
}
|
||||
|
||||
// Internal tables exposed for unit tests only — not part of the public API.
export const _test = {
  BROWSER_BUNDLE_IDS,
  TERMINAL_BUNDLE_IDS,
  TRADING_BUNDLE_IDS,
  POLICY_DENIED_BUNDLE_IDS,
  BROWSER_NAME_SUBSTRINGS,
  TERMINAL_NAME_SUBSTRINGS,
  TRADING_NAME_SUBSTRINGS,
  POLICY_DENIED_NAME_SUBSTRINGS,
};
|
||||
108
extracted-source/node_modules/@ant/computer-use-mcp/src/imageResize.ts
generated
vendored
108
extracted-source/node_modules/@ant/computer-use-mcp/src/imageResize.ts
generated
vendored
@@ -1,108 +0,0 @@
|
||||
/**
|
||||
* Port of the API's image transcoder target-size algorithm. Pre-sizing
|
||||
* screenshots to this function's output means the API's early-return fires
|
||||
* (tokens ≤ max) and the image is NOT resized server-side — so the model
|
||||
* sees exactly the dimensions in `ScreenshotResult.width/height` and
|
||||
* `scaleCoord` stays coherent.
|
||||
*
|
||||
* Rust reference: api/api/image_transcoder/rust_transcoder/src/utils/resize.rs
|
||||
* Sibling TS port: apps/claude-browser-use/src/utils/imageResize.ts (identical
|
||||
* algorithm, lives in the Chrome extension tree — not a shared package).
|
||||
*
|
||||
* See COORDINATES.md for why this matters for click accuracy.
|
||||
*/
|
||||
|
||||
/** Tunables for the target-size computation in `targetImageSize`. */
export interface ResizeParams {
  /** Pixels per vision-encoder tile edge — one tile costs one token. */
  pxPerToken: number;
  /** Cap on each image edge, in pixels. */
  maxTargetPx: number;
  /** Cap on total tokens, i.e. tiles-wide × tiles-high. */
  maxTargetTokens: number;
}
|
||||
|
||||
/**
 * Production defaults — match `resize.rs:160-164` and Chrome's
 * `CDPService.ts:638-642`. Vision encoder uses 28px tiles; 1568 is both
 * the long-edge cap (56 tiles) AND the token budget.
 */
export const API_RESIZE_PARAMS: ResizeParams = {
  pxPerToken: 28,
  maxTargetPx: 1568, // 56 tiles × 28 px
  maxTargetTokens: 1568,
};
|
||||
|
||||
/** ceil(px / pxPerToken). Matches resize.rs:74-76 (which uses integer ceil-div). */
|
||||
export function nTokensForPx(px: number, pxPerToken: number): number {
|
||||
return Math.floor((px - 1) / pxPerToken) + 1;
|
||||
}
|
||||
|
||||
function nTokensForImg(
|
||||
width: number,
|
||||
height: number,
|
||||
pxPerToken: number,
|
||||
): number {
|
||||
return nTokensForPx(width, pxPerToken) * nTokensForPx(height, pxPerToken);
|
||||
}
|
||||
|
||||
/**
 * Binary-search along the width dimension for the largest image that:
 * - preserves the input aspect ratio
 * - has long edge ≤ maxTargetPx
 * - has ceil(w/pxPerToken) × ceil(h/pxPerToken) ≤ maxTargetTokens
 *
 * Returns [width, height]. No-op if input already satisfies all three.
 *
 * The long-edge constraint alone (what we used to use) is insufficient on
 * squarer-than-16:9 displays: 1568×1014 (MBP 16" AR) is 56×37 = 2072 tokens,
 * over budget, and gets server-resized to 1372×887 — model then clicks in
 * 1372-space but scaleCoord assumed 1568-space → ~14% coord error.
 *
 * Matches resize.rs:91-155 exactly (verified against its test vectors).
 * NOTE(review): exact-port parity is the contract here — any edit must be
 * re-verified against the Rust test vectors.
 */
export function targetImageSize(
  width: number,
  height: number,
  params: ResizeParams,
): [number, number] {
  const { pxPerToken, maxTargetPx, maxTargetTokens } = params;

  // Early return — already within all three constraints; this is the case
  // pre-sized screenshots rely on (see file header).
  if (
    width <= maxTargetPx &&
    height <= maxTargetPx &&
    nTokensForImg(width, height, pxPerToken) <= maxTargetTokens
  ) {
    return [width, height];
  }

  // Normalize to landscape for the search; transpose result back.
  // (Single-level recursion: the transposed call takes the landscape path.)
  if (height > width) {
    const [w, h] = targetImageSize(height, width, params);
    return [h, w];
  }

  const aspectRatio = width / height;

  // Loop invariant: lowerBoundWidth is always valid, upperBoundWidth is
  // always invalid. ~12 iterations for a 4000px image.
  let upperBoundWidth = width;
  let lowerBoundWidth = 1;

  for (;;) {
    // Bounds adjacent → lower bound is the largest valid width.
    if (lowerBoundWidth + 1 === upperBoundWidth) {
      return [
        lowerBoundWidth,
        Math.max(Math.round(lowerBoundWidth / aspectRatio), 1),
      ];
    }

    const middleWidth = Math.floor((lowerBoundWidth + upperBoundWidth) / 2);
    const middleHeight = Math.max(Math.round(middleWidth / aspectRatio), 1);

    // middleWidth is the long edge (landscape-normalized), so only it is
    // checked against maxTargetPx.
    if (
      middleWidth <= maxTargetPx &&
      nTokensForImg(middleWidth, middleHeight, pxPerToken) <= maxTargetTokens
    ) {
      lowerBoundWidth = middleWidth;
    } else {
      upperBoundWidth = middleWidth;
    }
  }
}
|
||||
69
extracted-source/node_modules/@ant/computer-use-mcp/src/index.ts
generated
vendored
69
extracted-source/node_modules/@ant/computer-use-mcp/src/index.ts
generated
vendored
@@ -1,69 +0,0 @@
|
||||
export type {
|
||||
ComputerExecutor,
|
||||
DisplayGeometry,
|
||||
FrontmostApp,
|
||||
InstalledApp,
|
||||
ResolvePrepareCaptureResult,
|
||||
RunningApp,
|
||||
ScreenshotResult,
|
||||
} from "./executor.js";
|
||||
|
||||
export type {
|
||||
AppGrant,
|
||||
CuAppPermTier,
|
||||
ComputerUseHostAdapter,
|
||||
ComputerUseOverrides,
|
||||
ComputerUseSessionContext,
|
||||
CoordinateMode,
|
||||
CuGrantFlags,
|
||||
CuPermissionRequest,
|
||||
CuPermissionResponse,
|
||||
CuSubGates,
|
||||
CuTeachPermissionRequest,
|
||||
Logger,
|
||||
ResolvedAppRequest,
|
||||
ScreenshotDims,
|
||||
TeachStepRequest,
|
||||
TeachStepResult,
|
||||
} from "./types.js";
|
||||
|
||||
export { DEFAULT_GRANT_FLAGS } from "./types.js";
|
||||
|
||||
export {
|
||||
SENTINEL_BUNDLE_IDS,
|
||||
getSentinelCategory,
|
||||
} from "./sentinelApps.js";
|
||||
export type { SentinelCategory } from "./sentinelApps.js";
|
||||
|
||||
export {
|
||||
categoryToTier,
|
||||
getDefaultTierForApp,
|
||||
getDeniedCategory,
|
||||
getDeniedCategoryByDisplayName,
|
||||
getDeniedCategoryForApp,
|
||||
isPolicyDenied,
|
||||
} from "./deniedApps.js";
|
||||
export type { DeniedCategory } from "./deniedApps.js";
|
||||
|
||||
export { isSystemKeyCombo, normalizeKeySequence } from "./keyBlocklist.js";
|
||||
|
||||
export { ALL_SUB_GATES_OFF, ALL_SUB_GATES_ON } from "./subGates.js";
|
||||
|
||||
export { API_RESIZE_PARAMS, targetImageSize } from "./imageResize.js";
|
||||
export type { ResizeParams } from "./imageResize.js";
|
||||
|
||||
export { defersLockAcquire, handleToolCall } from "./toolCalls.js";
|
||||
export type {
|
||||
CuCallTelemetry,
|
||||
CuCallToolResult,
|
||||
CuErrorKind,
|
||||
} from "./toolCalls.js";
|
||||
|
||||
export { bindSessionContext, createComputerUseMcpServer } from "./mcpServer.js";
|
||||
export { buildComputerUseTools } from "./tools.js";
|
||||
|
||||
export {
|
||||
comparePixelAtLocation,
|
||||
validateClickTarget,
|
||||
} from "./pixelCompare.js";
|
||||
export type { CropRawPatchFn, PixelCompareResult } from "./pixelCompare.js";
|
||||
153
extracted-source/node_modules/@ant/computer-use-mcp/src/keyBlocklist.ts
generated
vendored
153
extracted-source/node_modules/@ant/computer-use-mcp/src/keyBlocklist.ts
generated
vendored
@@ -1,153 +0,0 @@
|
||||
/**
|
||||
* Key combos that cross app boundaries or terminate processes. Gated behind
|
||||
* the `systemKeyCombos` grant flag. When that flag is off, the `key` tool
|
||||
* rejects these and returns a tool error telling the model to request the
|
||||
* flag; all other combos work normally.
|
||||
*
|
||||
* Matching is canonicalized: every modifier alias the Rust executor accepts
|
||||
* collapses to one canonical name. Without this, `command+q` / `meta+q` /
|
||||
* `cmd+alt+escape` bypass the gate — see keyBlocklist.test.ts for the three
|
||||
* bypass forms and the Rust parity check that catches future alias drift.
|
||||
*/
|
||||
|
||||
/**
 * Every modifier alias enigo_wrap.rs accepts (two copies: :351-359, :564-572),
 * mapped to one canonical per Key:: variant. Left/right variants collapse —
 * the blocklist doesn't distinguish which Ctrl.
 *
 * Canonical names are Rust's own variant names lowercased. Blocklist entries
 * below use ONLY these. "meta" reads odd for Cmd+Q but it's honest: Rust
 * sends Key::Meta, which is Cmd on darwin and Win on win32.
 *
 * Consumed by `partitionKeys`: any token found here is a modifier; anything
 * else is a plain key.
 */
const CANONICAL_MODIFIER: Readonly<Record<string, string>> = {
  // Key::Meta — "meta"|"super"|"command"|"cmd"|"windows"|"win"
  meta: "meta",
  super: "meta",
  command: "meta",
  cmd: "meta",
  windows: "meta",
  win: "meta",
  // Key::Control + LControl + RControl
  ctrl: "ctrl",
  control: "ctrl",
  lctrl: "ctrl",
  lcontrol: "ctrl",
  rctrl: "ctrl",
  rcontrol: "ctrl",
  // Key::Shift + LShift + RShift
  shift: "shift",
  lshift: "shift",
  rshift: "shift",
  // Key::Alt and Key::Option — distinct Rust variants but same keycode on
  // darwin (kVK_Option). Collapse: cmd+alt+escape and cmd+option+escape
  // both Force Quit.
  alt: "alt",
  option: "alt",
};
|
||||
|
||||
/**
 * Sort order for canonicals. ctrl < alt < shift < meta.
 * Used as the comparator basis (via indexOf) when sorting modifiers.
 */
const MODIFIER_ORDER = ["ctrl", "alt", "shift", "meta"];
|
||||
|
||||
/**
 * Canonical-form entries only. Every modifier must be a CANONICAL_MODIFIER
 * *value* (not key), modifiers must be in MODIFIER_ORDER, non-modifier last.
 * The self-consistency test enforces this.
 */
const BLOCKED_DARWIN = new Set([
  "meta+q", // Cmd+Q — quit frontmost app
  "shift+meta+q", // Cmd+Shift+Q — log out
  "alt+meta+escape", // Cmd+Option+Esc — Force Quit dialog
  "meta+tab", // Cmd+Tab — app switcher
  "meta+space", // Cmd+Space — Spotlight
  "ctrl+meta+q", // Ctrl+Cmd+Q — lock screen
]);
|
||||
|
||||
/** win32 counterpart of BLOCKED_DARWIN — same canonical-form invariants. */
const BLOCKED_WIN32 = new Set([
  "ctrl+alt+delete", // Secure Attention Sequence
  "alt+f4", // close window
  "alt+tab", // window switcher
  "meta+l", // Win+L — lock
  "meta+d", // Win+D — show desktop
]);
|
||||
|
||||
/**
|
||||
* Partition into sorted-canonical modifiers and non-modifier keys.
|
||||
* Shared by normalizeKeySequence (join for display) and isSystemKeyCombo
|
||||
* (check mods+each-key to catch the cmd+q+a suffix bypass).
|
||||
*/
|
||||
function partitionKeys(seq: string): { mods: string[]; keys: string[] } {
|
||||
const parts = seq
|
||||
.toLowerCase()
|
||||
.split("+")
|
||||
.map((p) => p.trim())
|
||||
.filter(Boolean);
|
||||
const mods: string[] = [];
|
||||
const keys: string[] = [];
|
||||
for (const p of parts) {
|
||||
const canonical = CANONICAL_MODIFIER[p];
|
||||
if (canonical !== undefined) {
|
||||
mods.push(canonical);
|
||||
} else {
|
||||
keys.push(p);
|
||||
}
|
||||
}
|
||||
// Dedupe: "cmd+command+q" → "meta+q", not "meta+meta+q".
|
||||
const uniqueMods = [...new Set(mods)];
|
||||
uniqueMods.sort(
|
||||
(a, b) => MODIFIER_ORDER.indexOf(a) - MODIFIER_ORDER.indexOf(b),
|
||||
);
|
||||
return { mods: uniqueMods, keys };
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalize "Cmd + Shift + Q" → "shift+meta+q": lowercase, trim, alias →
|
||||
* canonical, dedupe, sort modifiers, non-modifiers last.
|
||||
*/
|
||||
export function normalizeKeySequence(seq: string): string {
|
||||
const { mods, keys } = partitionKeys(seq);
|
||||
return [...mods, ...keys].join("+");
|
||||
}
|
||||
|
||||
/**
|
||||
* True if the sequence would fire a blocked OS shortcut.
|
||||
*
|
||||
* Checks mods + EACH non-modifier key individually, not just the full
|
||||
* joined string. `cmd+q+a` → Rust presses Cmd, then Q (Cmd+Q fires here),
|
||||
* then A. Exact-match against "meta+q+a" misses; checking "meta+q" and
|
||||
* "meta+a" separately catches the Q.
|
||||
*
|
||||
* Modifiers-only sequences ("cmd+shift") are checked as-is — no key to
|
||||
* pair with, and no blocklist entry is modifier-only, so this is a no-op
|
||||
* that falls through to false. Covers the click-modifier case where
|
||||
* `left_click(text="cmd")` is legitimate.
|
||||
*/
|
||||
export function isSystemKeyCombo(
|
||||
seq: string,
|
||||
platform: "darwin" | "win32",
|
||||
): boolean {
|
||||
const blocklist = platform === "darwin" ? BLOCKED_DARWIN : BLOCKED_WIN32;
|
||||
const { mods, keys } = partitionKeys(seq);
|
||||
const prefix = mods.length > 0 ? mods.join("+") + "+" : "";
|
||||
|
||||
// No non-modifier keys (e.g. "cmd+shift" as click-modifiers) — check the
|
||||
// whole thing. Never matches (no blocklist entry is modifier-only) but
|
||||
// keeps the contract simple: every call reaches a .has().
|
||||
if (keys.length === 0) {
|
||||
return blocklist.has(mods.join("+"));
|
||||
}
|
||||
|
||||
// mods + each key. Any hit blocks the whole sequence.
|
||||
for (const key of keys) {
|
||||
if (blocklist.has(prefix + key)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
// Internal tables exposed for unit tests only — not part of the public API.
export const _test = {
  CANONICAL_MODIFIER,
  BLOCKED_DARWIN,
  BLOCKED_WIN32,
  MODIFIER_ORDER,
};
|
||||
313
extracted-source/node_modules/@ant/computer-use-mcp/src/mcpServer.ts
generated
vendored
313
extracted-source/node_modules/@ant/computer-use-mcp/src/mcpServer.ts
generated
vendored
@@ -1,313 +0,0 @@
|
||||
/**
|
||||
* MCP server factory + session-context binder.
|
||||
*
|
||||
* Two entry points:
|
||||
*
|
||||
* `bindSessionContext` — the wrapper closure. Takes a `ComputerUseSessionContext`
|
||||
* (getters + callbacks backed by host session state), returns a dispatcher.
|
||||
* Reusable by both the MCP CallTool handler here AND Cowork's
|
||||
* `InternalServerDefinition.handleToolCall` (which doesn't go through MCP).
|
||||
* This replaces the duplicated wrapper closures in apps/desktop/…/serverDef.ts
|
||||
* and the Claude Code CLI's CU host wrapper — both did the same thing: build `ComputerUseOverrides`
|
||||
* fresh from getters, call `handleToolCall`, stash screenshot, merge permissions.
|
||||
*
|
||||
* `createComputerUseMcpServer` — the Server object. When `context` is provided,
|
||||
* the CallTool handler is real (uses `bindSessionContext`). When not, it's the
|
||||
* legacy stub that returns a not-wired error. The tool-schema ListTools handler
|
||||
* is the same either way.
|
||||
*/
|
||||
|
||||
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
|
||||
import type { CallToolResult } from "@modelcontextprotocol/sdk/types.js";
|
||||
import {
|
||||
CallToolRequestSchema,
|
||||
ListToolsRequestSchema,
|
||||
} from "@modelcontextprotocol/sdk/types.js";
|
||||
|
||||
import type { ScreenshotResult } from "./executor.js";
|
||||
import type { CuCallToolResult } from "./toolCalls.js";
|
||||
import {
|
||||
defersLockAcquire,
|
||||
handleToolCall,
|
||||
resetMouseButtonHeld,
|
||||
} from "./toolCalls.js";
|
||||
import { buildComputerUseTools } from "./tools.js";
|
||||
import type {
|
||||
AppGrant,
|
||||
ComputerUseHostAdapter,
|
||||
ComputerUseOverrides,
|
||||
ComputerUseSessionContext,
|
||||
CoordinateMode,
|
||||
CuGrantFlags,
|
||||
CuPermissionResponse,
|
||||
} from "./types.js";
|
||||
import { DEFAULT_GRANT_FLAGS } from "./types.js";
|
||||
|
||||
// Fallback tool-error text when another session holds the computer-use lock
// and the host supplies no formatLockHeldMessage.
const DEFAULT_LOCK_HELD_MESSAGE =
  "Another Claude session is currently using the computer. Wait for that " +
  "session to finish, or find a non-computer-use approach.";
|
||||
|
||||
/**
|
||||
* Dedupe `granted` into `existing` on bundleId, spread truthy-only flags over
|
||||
* defaults+existing. Truthy-only: a subsequent `request_access` that doesn't
|
||||
* request clipboard can't revoke an earlier clipboard grant — revocation lives
|
||||
* in a Settings page, not here.
|
||||
*
|
||||
* Same merge both hosts implemented independently today.
|
||||
*/
|
||||
function mergePermissionResponse(
|
||||
existing: readonly AppGrant[],
|
||||
existingFlags: CuGrantFlags,
|
||||
response: CuPermissionResponse,
|
||||
): { apps: AppGrant[]; flags: CuGrantFlags } {
|
||||
const seen = new Set(existing.map((a) => a.bundleId));
|
||||
const apps = [
|
||||
...existing,
|
||||
...response.granted.filter((g) => !seen.has(g.bundleId)),
|
||||
];
|
||||
const truthyFlags = Object.fromEntries(
|
||||
Object.entries(response.flags).filter(([, v]) => v === true),
|
||||
);
|
||||
const flags: CuGrantFlags = {
|
||||
...DEFAULT_GRANT_FLAGS,
|
||||
...existingFlags,
|
||||
...truthyFlags,
|
||||
};
|
||||
return { apps, flags };
|
||||
}
|
||||
|
||||
/**
 * Bind session state to a reusable dispatcher. The returned function is the
 * wrapper closure: async lock gate → build overrides fresh → `handleToolCall`
 * → stash screenshot → strip piggybacked fields.
 *
 * The last-screenshot blob is held in a closure cell here (not on `ctx`), so
 * hosts don't need to guarantee `ctx` object identity across calls — they just
 * need to hold onto the returned dispatcher. Cowork caches per
 * `InternalServerContext` in a WeakMap; the CLI host constructs once at server creation.
 *
 * @param adapter host adapter — supplies logger/serverName and the executor
 * @param coordinateMode coordinate interpretation passed through to overrides
 * @param ctx session getters + callbacks backed by host session state
 * @returns async dispatcher `(toolName, args) → CuCallToolResult`
 */
export function bindSessionContext(
  adapter: ComputerUseHostAdapter,
  coordinateMode: CoordinateMode,
  ctx: ComputerUseSessionContext,
): (name: string, args: unknown) => Promise<CuCallToolResult> {
  const { logger, serverName } = adapter;

  // Screenshot blob persists here across calls — NOT on `ctx`. Hosts hold
  // onto the returned dispatcher; that's the identity that matters.
  let lastScreenshot: ScreenshotResult | undefined;

  // Wrap the host's permission callback so every response is merged into the
  // session's existing grants and pushed back via onAllowedAppsChanged. The
  // raw response is returned to handleToolCall unmodified.
  const wrapPermission = ctx.onPermissionRequest
    ? async (
        req: Parameters<NonNullable<typeof ctx.onPermissionRequest>>[0],
        signal: AbortSignal,
      ): Promise<CuPermissionResponse> => {
        const response = await ctx.onPermissionRequest!(req, signal);
        const { apps, flags } = mergePermissionResponse(
          ctx.getAllowedApps(),
          ctx.getGrantFlags(),
          response,
        );
        logger.debug(
          `[${serverName}] permission result: granted=${response.granted.length} denied=${response.denied.length}`,
        );
        ctx.onAllowedAppsChanged?.(apps, flags);
        return response;
      }
    : undefined;

  const wrapTeachPermission = ctx.onTeachPermissionRequest
    ? async (
        req: Parameters<NonNullable<typeof ctx.onTeachPermissionRequest>>[0],
        signal: AbortSignal,
      ): Promise<CuPermissionResponse> => {
        const response = await ctx.onTeachPermissionRequest!(req, signal);
        logger.debug(
          `[${serverName}] teach permission result: granted=${response.granted.length} denied=${response.denied.length}`,
        );
        // Teach doesn't request grant flags — preserve existing.
        const { apps } = mergePermissionResponse(
          ctx.getAllowedApps(),
          ctx.getGrantFlags(),
          response,
        );
        ctx.onAllowedAppsChanged?.(apps, {
          ...DEFAULT_GRANT_FLAGS,
          ...ctx.getGrantFlags(),
        });
        return response;
      }
    : undefined;

  return async (name, args) => {
    // ─── Async lock gate ─────────────────────────────────────────────────
    // Replaces the sync Gate-3 in `handleToolCall` — we pass
    // `checkCuLock: undefined` below so it no-ops. Hosts with
    // cross-process locks (O_EXCL file) await the real primitive here
    // instead of pre-computing + feeding a fake sync result.
    if (ctx.checkCuLock) {
      const lock = await ctx.checkCuLock();
      if (lock.holder !== undefined && !lock.isSelf) {
        const text =
          ctx.formatLockHeldMessage?.(lock.holder) ?? DEFAULT_LOCK_HELD_MESSAGE;
        return {
          content: [{ type: "text", text }],
          isError: true,
          telemetry: { error_kind: "cu_lock_held" },
        };
      }
      if (lock.holder === undefined && !defersLockAcquire(name)) {
        await ctx.acquireCuLock?.();
        // Re-check: the awaits above yield the microtask queue, so another
        // session's check+acquire can interleave with ours. Hosts where
        // acquire is a no-op when already held (Cowork's CuLockManager) give
        // no signal that we lost — verify we're now the holder before
        // proceeding. The CLI's O_EXCL file lock would surface this as a throw from
        // acquire instead; this re-check is a belt-and-suspenders for that
        // path too.
        const recheck = await ctx.checkCuLock();
        if (recheck.holder !== undefined && !recheck.isSelf) {
          const text =
            ctx.formatLockHeldMessage?.(recheck.holder) ??
            DEFAULT_LOCK_HELD_MESSAGE;
          return {
            content: [{ type: "text", text }],
            isError: true,
            telemetry: { error_kind: "cu_lock_held" },
          };
        }
        // Fresh holder → any prior session's mouseButtonHeld is stale.
        // Mirrors what Gate-3 does on the acquire branch. After the
        // re-check so we only clear module state when we actually won.
        resetMouseButtonHeld();
      }
    }

    // ─── Build overrides fresh ───────────────────────────────────────────
    // Blob-first; dims-fallback with base64:"" when the closure cell is
    // unset (cross-respawn). scaleCoord reads dims; pixelCompare sees "" →
    // isEmpty → skip.
    const dimsFallback = lastScreenshot
      ? undefined
      : ctx.getLastScreenshotDims?.();

    // Per-call AbortController for dialog dismissal. Aborted in `finally` —
    // if handleToolCall finishes (MCP timeout, throw) before the user
    // answers, the host's dialog handler sees the abort and tears down.
    const dialogAbort = new AbortController();

    const overrides: ComputerUseOverrides = {
      allowedApps: [...ctx.getAllowedApps()],
      grantFlags: ctx.getGrantFlags(),
      userDeniedBundleIds: ctx.getUserDeniedBundleIds(),
      coordinateMode,
      selectedDisplayId: ctx.getSelectedDisplayId(),
      displayPinnedByModel: ctx.getDisplayPinnedByModel?.(),
      displayResolvedForApps: ctx.getDisplayResolvedForApps?.(),
      lastScreenshot:
        lastScreenshot ??
        (dimsFallback ? { ...dimsFallback, base64: "" } : undefined),
      onPermissionRequest: wrapPermission
        ? (req) => wrapPermission(req, dialogAbort.signal)
        : undefined,
      onTeachPermissionRequest: wrapTeachPermission
        ? (req) => wrapTeachPermission(req, dialogAbort.signal)
        : undefined,
      onAppsHidden: ctx.onAppsHidden,
      getClipboardStash: ctx.getClipboardStash,
      onClipboardStashChanged: ctx.onClipboardStashChanged,
      onResolvedDisplayUpdated: ctx.onResolvedDisplayUpdated,
      onDisplayPinned: ctx.onDisplayPinned,
      onDisplayResolvedForApps: ctx.onDisplayResolvedForApps,
      onTeachModeActivated: ctx.onTeachModeActivated,
      onTeachStep: ctx.onTeachStep,
      onTeachWorking: ctx.onTeachWorking,
      getTeachModeActive: ctx.getTeachModeActive,
      // Undefined → handleToolCall's sync Gate-3 no-ops. The async gate
      // above already ran.
      checkCuLock: undefined,
      acquireCuLock: undefined,
      isAborted: ctx.isAborted,
    };

    logger.debug(
      `[${serverName}] tool=${name} allowedApps=${overrides.allowedApps.length} coordMode=${coordinateMode}`,
    );

    // ─── Dispatch ────────────────────────────────────────────────────────
    try {
      const result = await handleToolCall(adapter, name, args, overrides);

      // Stash the blob for the next call's overrides; hand only the dims
      // (never the base64 payload) to the host callback.
      if (result.screenshot) {
        lastScreenshot = result.screenshot;
        const { base64: _blob, ...dims } = result.screenshot;
        logger.debug(`[${serverName}] screenshot dims: ${JSON.stringify(dims)}`);
        ctx.onScreenshotCaptured?.(dims);
      }

      return result;
    } finally {
      dialogAbort.abort();
    }
  };
}
|
||||
|
||||
export function createComputerUseMcpServer(
|
||||
adapter: ComputerUseHostAdapter,
|
||||
coordinateMode: CoordinateMode,
|
||||
context?: ComputerUseSessionContext,
|
||||
): Server {
|
||||
const { serverName, logger } = adapter;
|
||||
|
||||
const server = new Server(
|
||||
{ name: serverName, version: "0.1.3" },
|
||||
{ capabilities: { tools: {}, logging: {} } },
|
||||
);
|
||||
|
||||
const tools = buildComputerUseTools(
|
||||
adapter.executor.capabilities,
|
||||
coordinateMode,
|
||||
);
|
||||
|
||||
server.setRequestHandler(ListToolsRequestSchema, async () =>
|
||||
adapter.isDisabled() ? { tools: [] } : { tools },
|
||||
);
|
||||
|
||||
if (context) {
|
||||
const dispatch = bindSessionContext(adapter, coordinateMode, context);
|
||||
server.setRequestHandler(
|
||||
CallToolRequestSchema,
|
||||
async (request): Promise<CallToolResult> => {
|
||||
const { screenshot: _s, telemetry: _t, ...result } = await dispatch(
|
||||
request.params.name,
|
||||
request.params.arguments ?? {},
|
||||
);
|
||||
return result;
|
||||
},
|
||||
);
|
||||
return server;
|
||||
}
|
||||
|
||||
// Legacy: no context → stub handler. Reached only if something calls the
|
||||
// server over MCP transport WITHOUT going through a binder (a wiring
|
||||
// regression). Clear error instead of silent failure.
|
||||
server.setRequestHandler(
|
||||
CallToolRequestSchema,
|
||||
async (request): Promise<CallToolResult> => {
|
||||
logger.warn(
|
||||
`[${serverName}] tool call "${request.params.name}" reached the stub handler — no session context bound. Per-session state unavailable.`,
|
||||
);
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: "text",
|
||||
text: "This computer-use server instance is not wired to a session. Per-session app permissions are not available on this code path.",
|
||||
},
|
||||
],
|
||||
isError: true,
|
||||
};
|
||||
},
|
||||
);
|
||||
|
||||
return server;
|
||||
}
|
||||
171
extracted-source/node_modules/@ant/computer-use-mcp/src/pixelCompare.ts
generated
vendored
171
extracted-source/node_modules/@ant/computer-use-mcp/src/pixelCompare.ts
generated
vendored
@@ -1,171 +0,0 @@
|
||||
/**
|
||||
* Staleness guard ported from the Vercept acquisition.
|
||||
*
|
||||
* Compares the model's last-seen screenshot against a fresh-right-now
|
||||
* screenshot at the click target, so the model never clicks pixels it hasn't
|
||||
* seen. If the 9×9 patch around the target differs, the click is aborted and
|
||||
* the model is told to re-screenshot. This is NOT a popup detector.
|
||||
*
|
||||
* Semantics preserved exactly:
|
||||
* - Skip on no `lastScreenshot` (cold start) — click proceeds.
|
||||
* - Skip on any internal error (crop throws, screenshot fails, etc.) —
|
||||
* click proceeds. Validation failure must never block the action.
|
||||
* - 9×9 exact byte equality on raw pixel bytes. No fuzzing, no tolerance.
|
||||
* - Compare in percentage coords so Retina scale doesn't matter.
|
||||
*
|
||||
* JPEG decode + crop is INJECTED via `ComputerUseHostAdapter.cropRawPatch`.
|
||||
* The original used `sharp` (LGPL, native `.node` addon); we inject Electron's
|
||||
* `nativeImage` (Chromium decoders, BSD, nothing to bundle) from the host, so
|
||||
* this package never imports it — the crop is a function parameter.
|
||||
*/
|
||||
|
||||
import type { ScreenshotResult } from "./executor.js";
|
||||
import type { Logger } from "./types.js";
|
||||
|
||||
/** Injected by the host. See `ComputerUseHostAdapter.cropRawPatch`. */
|
||||
export type CropRawPatchFn = (
|
||||
jpegBase64: string,
|
||||
rect: { x: number; y: number; width: number; height: number },
|
||||
) => Buffer | null;
|
||||
|
||||
/** 9×9 is empirically the sweet spot — large enough to catch a tooltip
|
||||
* appearing, small enough to not false-positive on surrounding animation.
|
||||
**/
|
||||
const DEFAULT_GRID_SIZE = 9;
|
||||
|
||||
export interface PixelCompareResult {
|
||||
/** true → click may proceed. false → patch changed, abort the click. */
|
||||
valid: boolean;
|
||||
/** true → validation did not run (cold start, sub-gate off, or internal
|
||||
* error). The caller MUST treat this identically to `valid: true`. */
|
||||
skipped: boolean;
|
||||
/** Populated when valid === false. Returned to the model verbatim. */
|
||||
warning?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Compute the crop rect for a patch centered on (xPercent, yPercent).
|
||||
*
|
||||
* Dimensions come from ScreenshotResult.width/height (physical pixels). Both
|
||||
* screenshots have the same dimensions (same display, consecutive captures),
|
||||
* so the rect is the same for both.
|
||||
*/
|
||||
function computeCropRect(
|
||||
imgW: number,
|
||||
imgH: number,
|
||||
xPercent: number,
|
||||
yPercent: number,
|
||||
gridSize: number,
|
||||
): { x: number; y: number; width: number; height: number } | null {
|
||||
if (!imgW || !imgH) return null;
|
||||
|
||||
const clampedX = Math.max(0, Math.min(100, xPercent));
|
||||
const clampedY = Math.max(0, Math.min(100, yPercent));
|
||||
|
||||
const centerX = Math.round((clampedX / 100.0) * imgW);
|
||||
const centerY = Math.round((clampedY / 100.0) * imgH);
|
||||
|
||||
const halfGrid = Math.floor(gridSize / 2);
|
||||
const cropX = Math.max(0, centerX - halfGrid);
|
||||
const cropY = Math.max(0, centerY - halfGrid);
|
||||
const cropW = Math.min(gridSize, imgW - cropX);
|
||||
const cropH = Math.min(gridSize, imgH - cropY);
|
||||
if (cropW <= 0 || cropH <= 0) return null;
|
||||
|
||||
return { x: cropX, y: cropY, width: cropW, height: cropH };
|
||||
}
|
||||
|
||||
/**
|
||||
* Compare the same patch location between two screenshots.
|
||||
*
|
||||
* @returns true when the raw pixel bytes are identical. false on any
|
||||
* difference, or on any internal error (the caller treats an error here as
|
||||
* `skipped`, so the false is harmless).
|
||||
*/
|
||||
export function comparePixelAtLocation(
|
||||
crop: CropRawPatchFn,
|
||||
lastScreenshot: ScreenshotResult,
|
||||
freshScreenshot: ScreenshotResult,
|
||||
xPercent: number,
|
||||
yPercent: number,
|
||||
gridSize: number = DEFAULT_GRID_SIZE,
|
||||
): boolean {
|
||||
// Both screenshots are of the same display — use the fresh one's
|
||||
// dimensions (less likely to be stale than last's).
|
||||
const rect = computeCropRect(
|
||||
freshScreenshot.width,
|
||||
freshScreenshot.height,
|
||||
xPercent,
|
||||
yPercent,
|
||||
gridSize,
|
||||
);
|
||||
if (!rect) return false;
|
||||
|
||||
const patch1 = crop(lastScreenshot.base64, rect);
|
||||
const patch2 = crop(freshScreenshot.base64, rect);
|
||||
if (!patch1 || !patch2) return false;
|
||||
|
||||
// Direct buffer equality. Note: nativeImage.toBitmap() gives BGRA, sharp's
|
||||
// .raw() gave RGB.
|
||||
// Doesn't matter — we're comparing two same-format buffers for equality.
|
||||
return patch1.equals(patch2);
|
||||
}
|
||||
|
||||
/**
|
||||
* Battle-tested click-target validation ported from the Vercept acquisition,
|
||||
* with the fresh-screenshot capture delegated to the caller (we don't have
|
||||
* a global `SystemActions.takeScreenshot()` — the executor is injected).
|
||||
*
|
||||
* Skip conditions (any of these → `{ valid: true, skipped: true }`):
|
||||
* - `lastScreenshot` is undefined (cold start).
|
||||
* - `takeFreshScreenshot()` throws or returns null.
|
||||
* - Injected crop function returns null (decode failure).
|
||||
* - Any other exception.
|
||||
*
|
||||
* The caller decides whether to invoke this at all (sub-gate check lives
|
||||
* in toolCalls.ts, not here).
|
||||
*/
|
||||
export async function validateClickTarget(
|
||||
crop: CropRawPatchFn,
|
||||
lastScreenshot: ScreenshotResult | undefined,
|
||||
xPercent: number,
|
||||
yPercent: number,
|
||||
takeFreshScreenshot: () => Promise<ScreenshotResult | null>,
|
||||
logger: Logger,
|
||||
gridSize: number = DEFAULT_GRID_SIZE,
|
||||
): Promise<PixelCompareResult> {
|
||||
if (!lastScreenshot) {
|
||||
return { valid: true, skipped: true };
|
||||
}
|
||||
|
||||
try {
|
||||
const fresh = await takeFreshScreenshot();
|
||||
if (!fresh) {
|
||||
return { valid: true, skipped: true };
|
||||
}
|
||||
|
||||
const pixelsMatch = comparePixelAtLocation(
|
||||
crop,
|
||||
lastScreenshot,
|
||||
fresh,
|
||||
xPercent,
|
||||
yPercent,
|
||||
gridSize,
|
||||
);
|
||||
|
||||
if (pixelsMatch) {
|
||||
return { valid: true, skipped: false };
|
||||
}
|
||||
return {
|
||||
valid: false,
|
||||
skipped: false,
|
||||
warning:
|
||||
"Screen content at the target location changed since the last screenshot. Take a new screenshot before clicking.",
|
||||
};
|
||||
} catch (err) {
|
||||
// Skip validation on technical errors, execute action anyway.
|
||||
// Battle-tested: validation failure must never block the click.
|
||||
logger.debug("[pixelCompare] validation error, skipping", err);
|
||||
return { valid: true, skipped: true };
|
||||
}
|
||||
}
|
||||
43
extracted-source/node_modules/@ant/computer-use-mcp/src/sentinelApps.ts
generated
vendored
43
extracted-source/node_modules/@ant/computer-use-mcp/src/sentinelApps.ts
generated
vendored
@@ -1,43 +0,0 @@
|
||||
/**
|
||||
* Bundle IDs that are escalations-in-disguise. The approval UI shows a warning
|
||||
* badge for these; they are NOT blocked. Power users may legitimately want the
|
||||
* model controlling a terminal.
|
||||
*
|
||||
* Imported by the renderer via the `./sentinelApps` subpath (package.json
|
||||
* `exports`), which keeps Next.js from reaching index.ts → mcpServer.ts →
|
||||
* @modelcontextprotocol/sdk (devDep, would fail module resolution). Keep
|
||||
* this file import-free so the subpath stays clean.
|
||||
*/
|
||||
|
||||
/** These apps can execute arbitrary shell commands. */
|
||||
const SHELL_ACCESS_BUNDLE_IDS = new Set([
|
||||
"com.apple.Terminal",
|
||||
"com.googlecode.iterm2",
|
||||
"com.microsoft.VSCode",
|
||||
"dev.warp.Warp-Stable",
|
||||
"com.github.wez.wezterm",
|
||||
"io.alacritty",
|
||||
"net.kovidgoyal.kitty",
|
||||
"com.jetbrains.intellij",
|
||||
"com.jetbrains.pycharm",
|
||||
]);
|
||||
|
||||
/** Finder in the allowlist ≈ browse + open-any-file. */
|
||||
const FILESYSTEM_ACCESS_BUNDLE_IDS = new Set(["com.apple.finder"]);
|
||||
|
||||
const SYSTEM_SETTINGS_BUNDLE_IDS = new Set(["com.apple.systempreferences"]);
|
||||
|
||||
export const SENTINEL_BUNDLE_IDS: ReadonlySet<string> = new Set([
|
||||
...SHELL_ACCESS_BUNDLE_IDS,
|
||||
...FILESYSTEM_ACCESS_BUNDLE_IDS,
|
||||
...SYSTEM_SETTINGS_BUNDLE_IDS,
|
||||
]);
|
||||
|
||||
export type SentinelCategory = "shell" | "filesystem" | "system_settings";
|
||||
|
||||
export function getSentinelCategory(bundleId: string): SentinelCategory | null {
|
||||
if (SHELL_ACCESS_BUNDLE_IDS.has(bundleId)) return "shell";
|
||||
if (FILESYSTEM_ACCESS_BUNDLE_IDS.has(bundleId)) return "filesystem";
|
||||
if (SYSTEM_SETTINGS_BUNDLE_IDS.has(bundleId)) return "system_settings";
|
||||
return null;
|
||||
}
|
||||
3649
extracted-source/node_modules/@ant/computer-use-mcp/src/toolCalls.ts
generated
vendored
3649
extracted-source/node_modules/@ant/computer-use-mcp/src/toolCalls.ts
generated
vendored
File diff suppressed because it is too large
Load Diff
706
extracted-source/node_modules/@ant/computer-use-mcp/src/tools.ts
generated
vendored
706
extracted-source/node_modules/@ant/computer-use-mcp/src/tools.ts
generated
vendored
@@ -1,706 +0,0 @@
|
||||
/**
|
||||
* MCP tool schemas for the computer-use server. Mirrors
|
||||
* claude-for-chrome-mcp/src/browserTools.ts in shape (plain `Tool`-shaped
|
||||
* object literals, no zod).
|
||||
*
|
||||
* Coordinate descriptions are baked in at tool-list build time from the
|
||||
* `chicago_coordinate_mode` gate. The model sees exactly ONE coordinate
|
||||
* convention in the param descriptions and never learns the other exists.
|
||||
* The host (`serverDef.ts`) reads the same frozen gate value for
|
||||
* `scaleCoord` — both must agree or clicks land in the wrong space.
|
||||
*/
|
||||
|
||||
import type { Tool } from "@modelcontextprotocol/sdk/types.js";
|
||||
|
||||
import type { CoordinateMode } from "./types.js";
|
||||
|
||||
// See packages/desktop/computer-use-mcp/COORDINATES.md before touching any
|
||||
// model-facing coordinate text. Chrome's browserTools.ts:143 is the reference
|
||||
// phrasing — "pixels from the left edge", no geometry, no number to do math with.
|
||||
const COORD_DESC: Record<CoordinateMode, { x: string; y: string }> = {
|
||||
pixels: {
|
||||
x: "Horizontal pixel position read directly from the most recent screenshot image, measured from the left edge. The server handles all scaling.",
|
||||
y: "Vertical pixel position read directly from the most recent screenshot image, measured from the top edge. The server handles all scaling.",
|
||||
},
|
||||
normalized_0_100: {
|
||||
x: "Horizontal position as a percentage of screen width, 0.0–100.0 (0 = left edge, 100 = right edge).",
|
||||
y: "Vertical position as a percentage of screen height, 0.0–100.0 (0 = top edge, 100 = bottom edge).",
|
||||
},
|
||||
};
|
||||
|
||||
const FRONTMOST_GATE_DESC =
|
||||
"The frontmost application must be in the session allowlist at the time of this call, or this tool returns an error and does nothing.";
|
||||
|
||||
/**
|
||||
* Item schema for the `actions` array in `computer_batch`, `teach_step`, and
|
||||
* `teach_batch`. All three dispatch through the same `dispatchAction` path
|
||||
* with the same validation — keep this enum in sync with `BATCHABLE_ACTIONS`
|
||||
* in toolCalls.ts.
|
||||
*/
|
||||
const BATCH_ACTION_ITEM_SCHEMA = {
|
||||
type: "object",
|
||||
properties: {
|
||||
action: {
|
||||
type: "string",
|
||||
enum: [
|
||||
"key",
|
||||
"type",
|
||||
"mouse_move",
|
||||
"left_click",
|
||||
"left_click_drag",
|
||||
"right_click",
|
||||
"middle_click",
|
||||
"double_click",
|
||||
"triple_click",
|
||||
"scroll",
|
||||
"hold_key",
|
||||
"screenshot",
|
||||
"cursor_position",
|
||||
"left_mouse_down",
|
||||
"left_mouse_up",
|
||||
"wait",
|
||||
],
|
||||
description: "The action to perform.",
|
||||
},
|
||||
coordinate: {
|
||||
type: "array",
|
||||
items: { type: "number" },
|
||||
minItems: 2,
|
||||
maxItems: 2,
|
||||
description:
|
||||
"(x, y) for click/mouse_move/scroll/left_click_drag end point.",
|
||||
},
|
||||
start_coordinate: {
|
||||
type: "array",
|
||||
items: { type: "number" },
|
||||
minItems: 2,
|
||||
maxItems: 2,
|
||||
description:
|
||||
"(x, y) drag start — left_click_drag only. Omit to drag from current cursor.",
|
||||
},
|
||||
text: {
|
||||
type: "string",
|
||||
description:
|
||||
"For type: the text. For key/hold_key: the chord string. For click/scroll: modifier keys to hold.",
|
||||
},
|
||||
scroll_direction: {
|
||||
type: "string",
|
||||
enum: ["up", "down", "left", "right"],
|
||||
},
|
||||
scroll_amount: { type: "integer", minimum: 0, maximum: 100 },
|
||||
duration: {
|
||||
type: "number",
|
||||
description: "Seconds (0–100). For hold_key/wait.",
|
||||
},
|
||||
repeat: {
|
||||
type: "integer",
|
||||
minimum: 1,
|
||||
maximum: 100,
|
||||
description: "For key: repeat count.",
|
||||
},
|
||||
},
|
||||
required: ["action"],
|
||||
};
|
||||
|
||||
/**
|
||||
* Build the tool list. Parameterized by capabilities and coordinate mode so
|
||||
* descriptions are honest and unambiguous (plan §1 — "Unfiltered + honest").
|
||||
*
|
||||
* `coordinateMode` MUST match what the host passes to `scaleCoord` at tool-
|
||||
* -call time. Both should read the same frozen-at-load gate constant.
|
||||
*
|
||||
* `installedAppNames` — optional pre-sanitized list of app display names to
|
||||
* enumerate in the `request_access` description. The caller is responsible
|
||||
* for sanitization (length cap, character allowlist, sort, count cap) —
|
||||
* this function just splices the list into the description verbatim. Omit
|
||||
* to fall back to the generic "display names or bundle IDs" wording.
|
||||
*/
|
||||
export function buildComputerUseTools(
|
||||
caps: {
|
||||
screenshotFiltering: "native" | "none";
|
||||
platform: "darwin" | "win32";
|
||||
/** Include request_teach_access + teach_step. Read once at server construction. */
|
||||
teachMode?: boolean;
|
||||
},
|
||||
coordinateMode: CoordinateMode,
|
||||
installedAppNames?: string[],
|
||||
): Tool[] {
|
||||
const coord = COORD_DESC[coordinateMode];
|
||||
|
||||
// Shared hint suffix for BOTH request_access and request_teach_access —
|
||||
// they use the same resolveRequestedApps path, so the model should get
|
||||
// the same enumeration for both.
|
||||
const installedAppsHint =
|
||||
installedAppNames && installedAppNames.length > 0
|
||||
? ` Available applications on this machine: ${installedAppNames.join(", ")}.`
|
||||
: "";
|
||||
|
||||
// [x, y]` tuple — param shape for all
|
||||
// click/move/scroll tools.
|
||||
const coordinateTuple = {
|
||||
type: "array",
|
||||
items: { type: "number" },
|
||||
minItems: 2,
|
||||
maxItems: 2,
|
||||
description: `(x, y): ${coord.x}`,
|
||||
};
|
||||
// Modifier hold during click. Shared across all 5 click variants.
|
||||
const clickModifierText = {
|
||||
type: "string",
|
||||
description:
|
||||
'Modifier keys to hold during the click (e.g. "shift", "ctrl+shift"). Supports the same syntax as the key tool.',
|
||||
};
|
||||
|
||||
const screenshotDesc =
|
||||
caps.screenshotFiltering === "native"
|
||||
? "Take a screenshot of the primary display. Applications not in the session allowlist are excluded at the compositor level — only granted apps and the desktop are visible."
|
||||
: "Take a screenshot of the primary display. On this platform, screenshots are NOT filtered — all open windows are visible. Input actions targeting apps not in the session allowlist are rejected.";
|
||||
|
||||
return [
|
||||
{
|
||||
name: "request_access",
|
||||
description:
|
||||
"Request user permission to control a set of applications for this session. Must be called before any other tool in this server. " +
|
||||
"The user sees a single dialog listing all requested apps and either allows the whole set or denies it. " +
|
||||
"Call this again mid-session to add more apps; previously granted apps remain granted. " +
|
||||
"Returns the granted apps, denied apps, and screenshot filtering capability.",
|
||||
inputSchema: {
|
||||
type: "object" as const,
|
||||
properties: {
|
||||
apps: {
|
||||
type: "array",
|
||||
items: { type: "string" },
|
||||
description:
|
||||
"Application display names (e.g. \"Slack\", \"Calendar\") or bundle identifiers (e.g. \"com.tinyspeck.slackmacgap\"). Display names are resolved case-insensitively against installed apps." +
|
||||
installedAppsHint,
|
||||
},
|
||||
reason: {
|
||||
type: "string",
|
||||
description:
|
||||
"One-sentence explanation shown to the user in the approval dialog. Explain the task, not the mechanism.",
|
||||
},
|
||||
clipboardRead: {
|
||||
type: "boolean",
|
||||
description:
|
||||
"Also request permission to read the user's clipboard (separate checkbox in the dialog).",
|
||||
},
|
||||
clipboardWrite: {
|
||||
type: "boolean",
|
||||
description:
|
||||
"Also request permission to write the user's clipboard. When granted, multi-line `type` calls use the clipboard fast path.",
|
||||
},
|
||||
systemKeyCombos: {
|
||||
type: "boolean",
|
||||
description:
|
||||
"Also request permission to send system-level key combos (quit app, switch app, lock screen). Without this, those specific combos are blocked.",
|
||||
},
|
||||
},
|
||||
required: ["apps", "reason"],
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "screenshot",
|
||||
description:
|
||||
screenshotDesc +
|
||||
" Returns an error if the allowlist is empty. The returned image is what subsequent click coordinates are relative to.",
|
||||
inputSchema: {
|
||||
type: "object" as const,
|
||||
properties: {
|
||||
save_to_disk: {
|
||||
type: "boolean",
|
||||
description:
|
||||
"Save the image to disk so it can be attached to a message for the user. Returns the saved path in the tool result. Only set this when you intend to share the image — screenshots you're just looking at don't need saving.",
|
||||
},
|
||||
},
|
||||
required: [],
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "zoom",
|
||||
description:
|
||||
"Take a higher-resolution screenshot of a specific region of the last full-screen screenshot. Use this liberally to inspect small text, button labels, or fine UI details that are hard to read in the downsampled full-screen image. " +
|
||||
"IMPORTANT: Coordinates in subsequent click calls always refer to the full-screen screenshot, never the zoomed image. This tool is read-only for inspecting detail.",
|
||||
inputSchema: {
|
||||
type: "object" as const,
|
||||
properties: {
|
||||
region: {
|
||||
type: "array",
|
||||
items: { type: "integer" },
|
||||
minItems: 4,
|
||||
maxItems: 4,
|
||||
description:
|
||||
"(x0, y0, x1, y1): Rectangle to zoom into, in the coordinate space of the most recent full-screen screenshot. x0,y0 = top-left, x1,y1 = bottom-right.",
|
||||
},
|
||||
save_to_disk: {
|
||||
type: "boolean",
|
||||
description:
|
||||
"Save the image to disk so it can be attached to a message for the user. Returns the saved path in the tool result. Only set this when you intend to share the image.",
|
||||
},
|
||||
},
|
||||
required: ["region"],
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "left_click",
|
||||
description: `Left-click at the given coordinates. ${FRONTMOST_GATE_DESC}`,
|
||||
inputSchema: {
|
||||
type: "object" as const,
|
||||
properties: {
|
||||
coordinate: coordinateTuple,
|
||||
text: clickModifierText,
|
||||
},
|
||||
required: ["coordinate"],
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "double_click",
|
||||
description: `Double-click at the given coordinates. Selects a word in most text editors. ${FRONTMOST_GATE_DESC}`,
|
||||
inputSchema: {
|
||||
type: "object" as const,
|
||||
properties: {
|
||||
coordinate: coordinateTuple,
|
||||
text: clickModifierText,
|
||||
},
|
||||
required: ["coordinate"],
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "triple_click",
|
||||
description: `Triple-click at the given coordinates. Selects a line in most text editors. ${FRONTMOST_GATE_DESC}`,
|
||||
inputSchema: {
|
||||
type: "object" as const,
|
||||
properties: {
|
||||
coordinate: coordinateTuple,
|
||||
text: clickModifierText,
|
||||
},
|
||||
required: ["coordinate"],
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "right_click",
|
||||
description: `Right-click at the given coordinates. Opens a context menu in most applications. ${FRONTMOST_GATE_DESC}`,
|
||||
inputSchema: {
|
||||
type: "object" as const,
|
||||
properties: {
|
||||
coordinate: coordinateTuple,
|
||||
text: clickModifierText,
|
||||
},
|
||||
required: ["coordinate"],
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "middle_click",
|
||||
description: `Middle-click (scroll-wheel click) at the given coordinates. ${FRONTMOST_GATE_DESC}`,
|
||||
inputSchema: {
|
||||
type: "object" as const,
|
||||
properties: {
|
||||
coordinate: coordinateTuple,
|
||||
text: clickModifierText,
|
||||
},
|
||||
required: ["coordinate"],
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "type",
|
||||
description: `Type text into whatever currently has keyboard focus. ${FRONTMOST_GATE_DESC} Newlines are supported. For keyboard shortcuts use \`key\` instead.`,
|
||||
inputSchema: {
|
||||
type: "object" as const,
|
||||
properties: {
|
||||
text: { type: "string", description: "Text to type." },
|
||||
},
|
||||
required: ["text"],
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "key",
|
||||
description:
|
||||
`Press a key or key combination (e.g. "return", "escape", "cmd+a", "ctrl+shift+tab"). ${FRONTMOST_GATE_DESC} ` +
|
||||
"System-level combos (quit app, switch app, lock screen) require the `systemKeyCombos` grant — without it they return an error. All other combos work.",
|
||||
inputSchema: {
|
||||
type: "object" as const,
|
||||
properties: {
|
||||
text: {
|
||||
type: "string",
|
||||
description: 'Modifiers joined with "+", e.g. "cmd+shift+a".',
|
||||
},
|
||||
repeat: {
|
||||
type: "integer",
|
||||
minimum: 1,
|
||||
maximum: 100,
|
||||
description: "Number of times to repeat the key press. Default is 1.",
|
||||
},
|
||||
},
|
||||
required: ["text"],
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "scroll",
|
||||
description: `Scroll at the given coordinates. ${FRONTMOST_GATE_DESC}`,
|
||||
inputSchema: {
|
||||
type: "object" as const,
|
||||
properties: {
|
||||
coordinate: coordinateTuple,
|
||||
scroll_direction: {
|
||||
type: "string",
|
||||
enum: ["up", "down", "left", "right"],
|
||||
description: "Direction to scroll.",
|
||||
},
|
||||
scroll_amount: {
|
||||
type: "integer",
|
||||
minimum: 0,
|
||||
maximum: 100,
|
||||
description: "Number of scroll ticks.",
|
||||
},
|
||||
},
|
||||
required: ["coordinate", "scroll_direction", "scroll_amount"],
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "left_click_drag",
|
||||
description: `Press, move to target, and release. ${FRONTMOST_GATE_DESC}`,
|
||||
inputSchema: {
|
||||
type: "object" as const,
|
||||
properties: {
|
||||
coordinate: {
|
||||
...coordinateTuple,
|
||||
description: `(x, y) end point: ${coord.x}`,
|
||||
},
|
||||
start_coordinate: {
|
||||
...coordinateTuple,
|
||||
description: `(x, y) start point. If omitted, drags from the current cursor position. ${coord.x}`,
|
||||
},
|
||||
},
|
||||
required: ["coordinate"],
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "mouse_move",
|
||||
description: `Move the mouse cursor without clicking. Useful for triggering hover states. ${FRONTMOST_GATE_DESC}`,
|
||||
inputSchema: {
|
||||
type: "object" as const,
|
||||
properties: {
|
||||
coordinate: coordinateTuple,
|
||||
},
|
||||
required: ["coordinate"],
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "open_application",
|
||||
description:
|
||||
"Bring an application to the front, launching it if necessary. The target application must already be in the session allowlist — call request_access first.",
|
||||
inputSchema: {
|
||||
type: "object" as const,
|
||||
properties: {
|
||||
app: {
|
||||
type: "string",
|
||||
description:
|
||||
"Display name (e.g. \"Slack\") or bundle identifier (e.g. \"com.tinyspeck.slackmacgap\").",
|
||||
},
|
||||
},
|
||||
required: ["app"],
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "switch_display",
|
||||
description:
|
||||
"Switch which monitor subsequent screenshots capture. Use this when the " +
|
||||
"application you need is on a different monitor than the one shown. " +
|
||||
"The screenshot tool tells you which monitor it captured and lists " +
|
||||
"other attached monitors by name — pass one of those names here. " +
|
||||
"After switching, call screenshot to see the new monitor. " +
|
||||
'Pass "auto" to return to automatic monitor selection.',
|
||||
inputSchema: {
|
||||
type: "object" as const,
|
||||
properties: {
|
||||
display: {
|
||||
type: "string",
|
||||
description:
|
||||
'Monitor name from the screenshot note (e.g. "Built-in Retina Display", ' +
|
||||
'"LG UltraFine"), or "auto" to re-enable automatic selection.',
|
||||
},
|
||||
},
|
||||
required: ["display"],
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "list_granted_applications",
|
||||
description:
|
||||
"List the applications currently in the session allowlist, plus the active grant flags and coordinate mode. No side effects.",
|
||||
inputSchema: {
|
||||
type: "object" as const,
|
||||
properties: {},
|
||||
required: [],
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "read_clipboard",
|
||||
description:
|
||||
"Read the current clipboard contents as text. Requires the `clipboardRead` grant.",
|
||||
inputSchema: {
|
||||
type: "object" as const,
|
||||
properties: {},
|
||||
required: [],
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "write_clipboard",
|
||||
description:
|
||||
"Write text to the clipboard. Requires the `clipboardWrite` grant.",
|
||||
inputSchema: {
|
||||
type: "object" as const,
|
||||
properties: {
|
||||
text: { type: "string" },
|
||||
},
|
||||
required: ["text"],
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "wait",
|
||||
description: "Wait for a specified duration.",
|
||||
inputSchema: {
|
||||
type: "object" as const,
|
||||
properties: {
|
||||
duration: {
|
||||
type: "number",
|
||||
description: "Duration in seconds (0–100).",
|
||||
},
|
||||
},
|
||||
required: ["duration"],
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "cursor_position",
|
||||
description:
|
||||
"Get the current mouse cursor position. Returns image-pixel coordinates relative to the most recent screenshot, or logical points if no screenshot has been taken.",
|
||||
inputSchema: {
|
||||
type: "object" as const,
|
||||
properties: {},
|
||||
required: [],
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "hold_key",
|
||||
description:
|
||||
`Press and hold a key or key combination for the specified duration, then release. ${FRONTMOST_GATE_DESC} ` +
|
||||
"System-level combos require the `systemKeyCombos` grant.",
|
||||
inputSchema: {
|
||||
type: "object" as const,
|
||||
properties: {
|
||||
text: {
|
||||
type: "string",
|
||||
description: 'Key or chord to hold, e.g. "space", "shift+down".',
|
||||
},
|
||||
duration: {
|
||||
type: "number",
|
||||
description: "Duration in seconds (0–100).",
|
||||
},
|
||||
},
|
||||
required: ["text", "duration"],
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "left_mouse_down",
|
||||
description:
|
||||
`Press the left mouse button at the current cursor position and leave it held. ${FRONTMOST_GATE_DESC} ` +
|
||||
"Use mouse_move first to position the cursor. Call left_mouse_up to release. Errors if the button is already held.",
|
||||
inputSchema: {
|
||||
type: "object" as const,
|
||||
properties: {},
|
||||
required: [],
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "left_mouse_up",
|
||||
description:
|
||||
`Release the left mouse button at the current cursor position. ${FRONTMOST_GATE_DESC} ` +
|
||||
"Pairs with left_mouse_down. Safe to call even if the button is not currently held.",
|
||||
inputSchema: {
|
||||
type: "object" as const,
|
||||
properties: {},
|
||||
required: [],
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "computer_batch",
|
||||
description:
|
||||
"Execute a sequence of actions in ONE tool call. Each individual tool call requires a model→API round trip (seconds); " +
|
||||
"batching a predictable sequence eliminates all but one. Use this whenever you can predict the outcome of several actions ahead — " +
|
||||
"e.g. click a field, type into it, press Return. Actions execute sequentially and stop on the first error. " +
|
||||
`${FRONTMOST_GATE_DESC} The frontmost check runs before EACH action inside the batch — if an action opens a non-allowed app, the next action's gate fires and the batch stops there. ` +
|
||||
"Mid-batch screenshot actions are allowed for inspection but coordinates in subsequent clicks always refer to the PRE-BATCH full-screen screenshot.",
|
||||
inputSchema: {
|
||||
type: "object" as const,
|
||||
properties: {
|
||||
actions: {
|
||||
type: "array",
|
||||
minItems: 1,
|
||||
items: BATCH_ACTION_ITEM_SCHEMA,
|
||||
description:
|
||||
'List of actions. Example: [{"action":"left_click","coordinate":[100,200]},{"action":"type","text":"hello"},{"action":"key","text":"Return"}]',
|
||||
},
|
||||
},
|
||||
required: ["actions"],
|
||||
},
|
||||
},
|
||||
|
||||
...(caps.teachMode ? buildTeachTools(coord, installedAppsHint) : []),
|
||||
];
|
||||
}
|
||||
|
||||
/**
|
||||
* Teach-mode tools. Split out so the spread above stays a single expression;
|
||||
* takes `coord` so `teach_step.anchor`'s description uses the same
|
||||
* frozen coordinate-mode phrasing as click coords, and `installedAppsHint`
|
||||
* so `request_teach_access.apps` gets the same enumeration as
|
||||
* `request_access.apps` (same resolution path → same hint).
|
||||
*/
|
||||
function buildTeachTools(
|
||||
coord: { x: string; y: string },
|
||||
installedAppsHint: string,
|
||||
): Tool[] {
|
||||
// Shared between teach_step (top-level) and teach_batch (inside steps[]
|
||||
// items). Depends on coord, so it lives inside this factory.
|
||||
const teachStepProperties = {
|
||||
explanation: {
|
||||
type: "string",
|
||||
description:
|
||||
"Tooltip body text. Explain what the user is looking at and why it matters. " +
|
||||
"This is the ONLY place the user sees your words — be complete but concise.",
|
||||
},
|
||||
next_preview: {
|
||||
type: "string",
|
||||
description:
|
||||
"One line describing exactly what will happen when the user clicks Next. " +
|
||||
'Example: "Next: I\'ll click Create Bucket and type the name." ' +
|
||||
"Shown below the explanation in a smaller font.",
|
||||
},
|
||||
anchor: {
|
||||
type: "array",
|
||||
items: { type: "number" },
|
||||
minItems: 2,
|
||||
maxItems: 2,
|
||||
description:
|
||||
`(x, y) — where the tooltip arrow points. ${coord.x} ` +
|
||||
"Omit to center the tooltip with no arrow (for general-context steps).",
|
||||
},
|
||||
actions: {
|
||||
type: "array",
|
||||
// Empty allowed — "read this, click Next" steps.
|
||||
items: BATCH_ACTION_ITEM_SCHEMA,
|
||||
description:
|
||||
"Actions to execute when the user clicks Next. Same item schema as computer_batch.actions. " +
|
||||
"Empty array is valid for purely explanatory steps. Actions run sequentially and stop on first error.",
|
||||
},
|
||||
} as const;
|
||||
|
||||
return [
|
||||
{
|
||||
name: "request_teach_access",
|
||||
description:
|
||||
"Request permission to guide the user through a task step-by-step with on-screen tooltips. " +
|
||||
"Use this INSTEAD OF request_access when the user wants to LEARN how to do something " +
|
||||
'(phrases like "teach me", "walk me through", "show me how", "help me learn"). ' +
|
||||
"On approval the main Claude window hides and a fullscreen tooltip overlay appears. " +
|
||||
"You then call teach_step repeatedly; each call shows one tooltip and waits for the user to click Next. " +
|
||||
"Same app-allowlist semantics as request_access, but no clipboard/system-key flags. " +
|
||||
"Teach mode ends automatically when your turn ends.",
|
||||
inputSchema: {
|
||||
type: "object" as const,
|
||||
properties: {
|
||||
apps: {
|
||||
type: "array",
|
||||
items: { type: "string" },
|
||||
description:
|
||||
'Application display names (e.g. "Slack", "Calendar") or bundle identifiers. Resolved case-insensitively against installed apps.' +
|
||||
installedAppsHint,
|
||||
},
|
||||
reason: {
|
||||
type: "string",
|
||||
description:
|
||||
'What you will be teaching. Shown in the approval dialog as "Claude wants to guide you through {reason}". Keep it short and task-focused.',
|
||||
},
|
||||
},
|
||||
required: ["apps", "reason"],
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "teach_step",
|
||||
description:
|
||||
"Show one guided-tour tooltip and wait for the user to click Next. On Next, execute the actions, " +
|
||||
"take a fresh screenshot, and return both — you do NOT need a separate screenshot call between steps. " +
|
||||
"The returned image shows the state after your actions ran; anchor the next teach_step against it. " +
|
||||
"IMPORTANT — the user only sees the tooltip during teach mode. Put ALL narration in `explanation`. " +
|
||||
"Text you emit outside teach_step calls is NOT visible until teach mode ends. " +
|
||||
"Pack as many actions as possible into each step's `actions` array — the user waits through " +
|
||||
"the whole round trip between clicks, so one step that fills a form beats five steps that fill one field each. " +
|
||||
"Returns {exited:true} if the user clicks Exit — do not call teach_step again after that. " +
|
||||
"Take an initial screenshot before your FIRST teach_step to anchor it.",
|
||||
inputSchema: {
|
||||
type: "object" as const,
|
||||
properties: teachStepProperties,
|
||||
required: ["explanation", "next_preview", "actions"],
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "teach_batch",
|
||||
description:
|
||||
"Queue multiple teach steps in one tool call. Parallels computer_batch: " +
|
||||
"N steps → one model↔API round trip instead of N. Each step still shows a tooltip " +
|
||||
"and waits for the user's Next click, but YOU aren't waiting for a round trip between steps. " +
|
||||
"You can call teach_batch multiple times in one tour — treat each batch as one predictable " +
|
||||
"SEGMENT (typically: all the steps on one page). The returned screenshot shows the state " +
|
||||
"after the batch's final actions; anchor the NEXT teach_batch against it. " +
|
||||
"WITHIN a batch, all anchors and click coordinates refer to the PRE-BATCH screenshot " +
|
||||
"(same invariant as computer_batch) — for steps 2+ in a batch, either omit anchor " +
|
||||
"(centered tooltip) or target elements you know won't have moved. " +
|
||||
"Good pattern: batch 5 tooltips on page A (last step navigates) → read returned screenshot → " +
|
||||
"batch 3 tooltips on page B → done. " +
|
||||
"Returns {exited:true, stepsCompleted:N} if the user clicks Exit — do NOT call again after that; " +
|
||||
"{stepsCompleted, stepFailed, ...} if an action errors mid-batch; " +
|
||||
"otherwise {stepsCompleted, results:[...]} plus a final screenshot. " +
|
||||
"Fall back to individual teach_step calls when you need to react to each intermediate screenshot.",
|
||||
inputSchema: {
|
||||
type: "object" as const,
|
||||
properties: {
|
||||
steps: {
|
||||
type: "array",
|
||||
minItems: 1,
|
||||
items: {
|
||||
type: "object",
|
||||
properties: teachStepProperties,
|
||||
required: ["explanation", "next_preview", "actions"],
|
||||
},
|
||||
description:
|
||||
"Ordered steps. Validated upfront — a typo in step 5 errors before any tooltip shows.",
|
||||
},
|
||||
},
|
||||
required: ["steps"],
|
||||
},
|
||||
},
|
||||
];
|
||||
}
|
||||
622
extracted-source/node_modules/@ant/computer-use-mcp/src/types.ts
generated
vendored
622
extracted-source/node_modules/@ant/computer-use-mcp/src/types.ts
generated
vendored
@@ -1,622 +0,0 @@
|
||||
import type {
|
||||
ComputerExecutor,
|
||||
InstalledApp,
|
||||
ScreenshotResult,
|
||||
} from "./executor.js";
|
||||
|
||||
/** `ScreenshotResult` without the base64 blob. The shape hosts persist for
|
||||
* cross-respawn `scaleCoord` survival. */
|
||||
export type ScreenshotDims = Omit<ScreenshotResult, "base64">;
|
||||
|
||||
/** Shape mirrors claude-for-chrome-mcp/src/types.ts:1-7 */
|
||||
export interface Logger {
|
||||
info: (message: string, ...args: unknown[]) => void;
|
||||
error: (message: string, ...args: unknown[]) => void;
|
||||
warn: (message: string, ...args: unknown[]) => void;
|
||||
debug: (message: string, ...args: unknown[]) => void;
|
||||
silly: (message: string, ...args: unknown[]) => void;
|
||||
}
|
||||
|
||||
/**
|
||||
* Per-app permission tier. Hardcoded by category at grant time — the
|
||||
* approval dialog displays the tier but the user cannot change it (for now).
|
||||
*
|
||||
* - `"read"` — visible in screenshots, NO interaction (no clicks, no typing).
|
||||
* Browsers land here: the model can read a page that's already open, but
|
||||
* must use the Claude-in-Chrome MCP for any navigation/clicking. Trading
|
||||
* platforms land here too (no CiC alternative — the model asks the user).
|
||||
* - `"click"` — visible + plain left-click, scroll. NO typing/keys,
|
||||
* NO right/middle-click, NO modifier-clicks, NO drag-drop (all text-
|
||||
* injection vectors). Terminals/IDEs land here: the model can click a
|
||||
* Run button or scroll test output, but `type("rm -rf /")` is blocked
|
||||
* and so is right-click→Paste and dragging text onto the terminal.
|
||||
* - `"full"` — visible + click + type/key/paste. Everything else.
|
||||
*
|
||||
* Enforced in `runInputActionGates` via the frontmost-app check: keyboard
|
||||
* actions require `"full"`, mouse actions require `"click"` or higher.
|
||||
*/
|
||||
export type CuAppPermTier = "read" | "click" | "full";
|
||||
|
||||
/**
|
||||
* A single app the user has approved for the current session. Session-scoped
|
||||
* only — there is no "once" or "forever" scope (unlike Chrome's per-domain
|
||||
* three-way). CU has no natural "once" unit; one task = hundreds of clicks.
|
||||
* Mirrors how `chromeAllowedDomains` is a plain `string[]` with no per-item
|
||||
* scope.
|
||||
*/
|
||||
export interface AppGrant {
|
||||
bundleId: string;
|
||||
displayName: string;
|
||||
/** Epoch ms. For Settings-page display ("Granted 3m ago"). */
|
||||
grantedAt: number;
|
||||
/** Undefined → `"full"` (back-compat for pre-tier grants persisted in
|
||||
* session state). */
|
||||
tier?: CuAppPermTier;
|
||||
}
|
||||
|
||||
/** Orthogonal to the app allowlist. */
|
||||
export interface CuGrantFlags {
|
||||
clipboardRead: boolean;
|
||||
clipboardWrite: boolean;
|
||||
/**
|
||||
* When false, the `key` tool rejects combos in `keyBlocklist.ts`
|
||||
* (cmd+q, cmd+tab, cmd+space, cmd+shift+q, ctrl+alt+delete). All other
|
||||
* key sequences work regardless.
|
||||
*/
|
||||
systemKeyCombos: boolean;
|
||||
}
|
||||
|
||||
export const DEFAULT_GRANT_FLAGS: CuGrantFlags = {
|
||||
clipboardRead: false,
|
||||
clipboardWrite: false,
|
||||
systemKeyCombos: false,
|
||||
};
|
||||
|
||||
/**
|
||||
* Host picks via GrowthBook JSON feature `chicago_coordinate_mode`, baked
|
||||
* into tool param descriptions at server-construction time. The model sees
|
||||
* ONE convention and never learns the other exists. `normalized_0_100`
|
||||
* sidesteps the Retina scaleFactor bug class entirely.
|
||||
*/
|
||||
export type CoordinateMode = "pixels" | "normalized_0_100";
|
||||
|
||||
/**
|
||||
* Independent kill switches for subtle/risky ported behaviors. Read from
|
||||
* GrowthBook by the host adapter, consulted in `toolCalls.ts`.
|
||||
*/
|
||||
export interface CuSubGates {
|
||||
/** 9×9 exact-byte staleness guard before click. */
|
||||
pixelValidation: boolean;
|
||||
/** Route `type("foo\nbar")` through clipboard instead of keystroke-by-keystroke. */
|
||||
clipboardPasteMultiline: boolean;
|
||||
/**
|
||||
* Ease-out-cubic mouse glide at 60fps, distance-proportional duration
|
||||
* (2000 px/sec, capped at 0.5s). Adds up to ~0.5s latency
|
||||
* per click. When off, cursor teleports instantly.
|
||||
*/
|
||||
mouseAnimation: boolean;
|
||||
/**
|
||||
* Pre-action sequence: hide non-allowlisted apps, then defocus us (from the
|
||||
* Vercept acquisition). When off, the
|
||||
* frontmost gate fires in the normal case and the model gets stuck — this
|
||||
* is the A/B-test-the-old-broken-behavior switch.
|
||||
*/
|
||||
hideBeforeAction: boolean;
|
||||
/**
|
||||
* Auto-resolve the target display before each screenshot when the
|
||||
* selected display has no allowed-app windows. When on, `handleScreenshot`
|
||||
* uses the atomic Swift path; off → sticks with `selectedDisplayId`.
|
||||
*/
|
||||
autoTargetDisplay: boolean;
|
||||
/**
|
||||
* Stash+clear the clipboard while a tier-"click" app is frontmost.
|
||||
* Closes the gap where a click-tier terminal/IDE has a UI Paste button
|
||||
* that's plain-left-clickable — without this, the tier "click"
|
||||
* keyboard block can be routed around by clicking Paste. Restored when
|
||||
* a non-"click" app becomes frontmost, or at turn end.
|
||||
*/
|
||||
clipboardGuard: boolean;
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Permission request/response (mirror of BridgePermissionRequest, types.ts:77-94)
|
||||
// ----------------------------------------------------------------------------
|
||||
|
||||
/** One entry per app the model asked for, after name → bundle ID resolution. */
|
||||
export interface ResolvedAppRequest {
|
||||
/** What the model asked for (e.g. "Slack", "com.tinyspeck.slackmacgap"). */
|
||||
requestedName: string;
|
||||
/** The resolved InstalledApp if found, else undefined (shown greyed in the UI). */
|
||||
resolved?: InstalledApp;
|
||||
/** Shell-access-equivalent bundle IDs get a UI warning. See sentinelApps.ts. */
|
||||
isSentinel: boolean;
|
||||
/** Already in the allowlist → skip the checkbox, return in `granted` immediately. */
|
||||
alreadyGranted: boolean;
|
||||
/** Hardcoded tier for this app (browser→"read", terminal→"click", else "full").
|
||||
* The dialog displays this read-only; the renderer passes it through
|
||||
* verbatim in the AppGrant. */
|
||||
proposedTier: CuAppPermTier;
|
||||
}
|
||||
|
||||
/**
|
||||
* Payload for the renderer approval dialog. Rides through the existing
|
||||
* `ToolPermissionRequest.input: unknown` field
|
||||
* (packages/utils/desktop/bridge/common/claude.web.ts:1262) — no IPC schema
|
||||
* change needed.
|
||||
*/
|
||||
export interface CuPermissionRequest {
|
||||
requestId: string;
|
||||
/** Model-provided reason string. Shown prominently in the approval UI. */
|
||||
reason: string;
|
||||
apps: ResolvedAppRequest[];
|
||||
/** What the model asked for. User can toggle independently of apps. */
|
||||
requestedFlags: Partial<CuGrantFlags>;
|
||||
/**
|
||||
* For the "On Windows, Claude can see all apps..." footnote. Taken from
|
||||
* `executor.capabilities.screenshotFiltering` so the renderer doesn't
|
||||
* need to know about platforms.
|
||||
*/
|
||||
screenshotFiltering: "native" | "none";
|
||||
/**
|
||||
* Present only when TCC permissions are NOT yet granted. When present,
|
||||
* the renderer shows a TCC toggle panel (two rows: Accessibility, Screen
|
||||
* Recording) INSTEAD OF the app list. Clicking a row's "Request" button
|
||||
* triggers the OS prompt; the store polls on window-focus and flips the
|
||||
* toggle when the grant is detected. macOS itself prompts the user to
|
||||
* restart after granting Screen Recording — we don't.
|
||||
*/
|
||||
tccState?: {
|
||||
accessibility: boolean;
|
||||
screenRecording: boolean;
|
||||
};
|
||||
/**
|
||||
* Apps with windows on the CU display that aren't in the requested
|
||||
* allowlist. These will be hidden the first time Claude takes an action.
|
||||
* Computed at request_access time — may be slightly stale by the time the
|
||||
* user clicks Allow, but it's a preview, not a contract. Absent when
|
||||
* empty so the renderer can skip the section cleanly.
|
||||
*/
|
||||
willHide?: Array<{ bundleId: string; displayName: string }>;
|
||||
/**
|
||||
* `chicagoAutoUnhide` app preference at request time. The renderer picks
|
||||
* between "...then restored when Claude is done" and "...will be hidden"
|
||||
* copy. Absent when `willHide` is absent (same condition).
|
||||
*/
|
||||
autoUnhideEnabled?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* What the renderer stuffs into `updatedInput._cuGrants` when the user clicks
|
||||
* "Allow for this session" (mirror of the `_allowAllSites` sentinel at
|
||||
* LocalAgentModeSessionManager.ts:2794).
|
||||
*/
|
||||
export interface CuPermissionResponse {
|
||||
granted: AppGrant[];
|
||||
/** Bundle IDs the user unchecked, or apps that weren't installed. */
|
||||
denied: Array<{ bundleId: string; reason: "user_denied" | "not_installed" }>;
|
||||
flags: CuGrantFlags;
|
||||
/**
|
||||
* Whether the user clicked Allow in THIS dialog. Only set by the
|
||||
* teach-mode handler — regular request_access doesn't need it (the
|
||||
* session manager's `result.behavior` gates the merge there). Needed
|
||||
* because when all requested apps are already granted (skipDialogGrants
|
||||
* non-empty, needDialog empty), Allow and Deny produce identical
|
||||
* `{granted:[], denied:[]}` payloads and the tool handler can't tell
|
||||
* them apart without this. Undefined → legacy/regular path, do not
|
||||
* gate on it.
|
||||
*/
|
||||
userConsented?: boolean;
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Host adapter (mirror of ClaudeForChromeContext, types.ts:33-62)
|
||||
// ----------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Process-lifetime singleton dependencies. Everything that does NOT vary per
|
||||
* tool call. Built once by `apps/desktop/src/main/nest-only/chicago/hostAdapter.ts`.
|
||||
* No Electron imports in this package — the host injects everything.
|
||||
*/
|
||||
export interface ComputerUseHostAdapter {
|
||||
serverName: string;
|
||||
logger: Logger;
|
||||
executor: ComputerExecutor;
|
||||
|
||||
/**
|
||||
* TCC state check — Accessibility + Screen Recording on macOS. Pure check,
|
||||
* no dialog, no relaunch. When either is missing, `request_access` threads
|
||||
* the state through to the renderer which shows a toggle panel; all other
|
||||
* tools return a tool error.
|
||||
*/
|
||||
ensureOsPermissions(): Promise<
|
||||
| { granted: true }
|
||||
| { granted: false; accessibility: boolean; screenRecording: boolean }
|
||||
>;
|
||||
|
||||
/** The Settings-page kill switch (`chicagoEnabled` app preference). */
|
||||
isDisabled(): boolean;
|
||||
|
||||
/**
|
||||
* The `chicagoAutoUnhide` app preference. Consumed by `buildAccessRequest`
|
||||
* to populate `CuPermissionRequest.autoUnhideEnabled` so the renderer's
|
||||
* "will be hidden" copy can say "then restored" only when true.
|
||||
*/
|
||||
getAutoUnhideEnabled(): boolean;
|
||||
|
||||
/**
|
||||
* Sub-gates re-read on every tool call so GrowthBook flips take effect
|
||||
* mid-session without restart.
|
||||
*/
|
||||
getSubGates(): CuSubGates;
|
||||
|
||||
/**
|
||||
* JPEG decode + crop + raw pixel bytes, for the PixelCompare staleness guard.
|
||||
* Injected so this package stays Electron-free. The host implements it via
|
||||
* `nativeImage.createFromBuffer(jpeg).crop(rect).toBitmap()` — Chromium's
|
||||
* decoders, BSD-licensed, no `.node` binary.
|
||||
*
|
||||
* Returns null on decode/crop failure — caller treats null as `skipped`,
|
||||
* click proceeds (validation failure must never block the action).
|
||||
*/
|
||||
cropRawPatch(
|
||||
jpegBase64: string,
|
||||
rect: { x: number; y: number; width: number; height: number },
|
||||
): Buffer | null;
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Session context (getter/callback bag for bindSessionContext)
|
||||
// ----------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Per-session state binding for `bindSessionContext`. Hosts build this once
|
||||
* per session with getters that read fresh from their session store and
|
||||
* callbacks that write back. The returned dispatcher builds
|
||||
* `ComputerUseOverrides` from these getters on every call.
|
||||
*
|
||||
* Callbacks must be set at construction time — `bindSessionContext` reads
|
||||
* them once at bind, not per call.
|
||||
*
|
||||
* The lock hooks are **async** — `bindSessionContext` awaits them before
|
||||
* `handleToolCall`, then passes `checkCuLock: undefined` in overrides so the
|
||||
* sync Gate-3 in `handleToolCall` no-ops. Hosts with in-memory sync locks
|
||||
* (Cowork) wrap them trivially; hosts with cross-process locks (the CLI's
|
||||
* O_EXCL file) call the real async primitive directly.
|
||||
*/
|
||||
export interface ComputerUseSessionContext {
|
||||
// ── Read state fresh per call ──────────────────────────────────────
|
||||
|
||||
getAllowedApps(): readonly AppGrant[];
|
||||
getGrantFlags(): CuGrantFlags;
|
||||
/** Per-user auto-deny list (Settings page). Empty array = none. */
|
||||
getUserDeniedBundleIds(): readonly string[];
|
||||
getSelectedDisplayId(): number | undefined;
|
||||
getDisplayPinnedByModel?(): boolean;
|
||||
getDisplayResolvedForApps?(): string | undefined;
|
||||
getTeachModeActive?(): boolean;
|
||||
/** Dims-only fallback when `lastScreenshot` is unset (cross-respawn).
|
||||
* `bindSessionContext` reconstructs `{...dims, base64: ""}` so scaleCoord
|
||||
* works and pixelCompare correctly skips. */
|
||||
getLastScreenshotDims?(): ScreenshotDims | undefined;
|
||||
|
||||
// ── Write-back callbacks ───────────────────────────────────────────
|
||||
|
||||
/** Shows the approval dialog. Host routes to its UI, awaits user. The
|
||||
* signal is aborted if the tool call finishes before the user answers
|
||||
* (MCP timeout, etc.) — hosts dismiss the dialog on abort. */
|
||||
onPermissionRequest?(
|
||||
req: CuPermissionRequest,
|
||||
signal: AbortSignal,
|
||||
): Promise<CuPermissionResponse>;
|
||||
/** Teach-mode sibling of `onPermissionRequest`. */
|
||||
onTeachPermissionRequest?(
|
||||
req: CuTeachPermissionRequest,
|
||||
signal: AbortSignal,
|
||||
): Promise<CuPermissionResponse>;
|
||||
/** Called by `bindSessionContext` after merging a permission response into
|
||||
* the allowlist (dedupe on bundleId, truthy-only flag spread). Host
|
||||
* persists for resume survival. */
|
||||
onAllowedAppsChanged?(apps: readonly AppGrant[], flags: CuGrantFlags): void;
|
||||
onAppsHidden?(bundleIds: string[]): void;
|
||||
/** Reads the session's clipboardGuard stash. undefined → no stash held. */
|
||||
getClipboardStash?(): string | undefined;
|
||||
/** Writes the clipboardGuard stash. undefined clears it. */
|
||||
onClipboardStashChanged?(stash: string | undefined): void;
|
||||
onResolvedDisplayUpdated?(displayId: number): void;
|
||||
onDisplayPinned?(displayId: number | undefined): void;
|
||||
onDisplayResolvedForApps?(sortedBundleIdsKey: string): void;
|
||||
/** Called after each screenshot. Host persists for respawn survival. */
|
||||
onScreenshotCaptured?(dims: ScreenshotDims): void;
|
||||
onTeachModeActivated?(): void;
|
||||
onTeachStep?(req: TeachStepRequest): Promise<TeachStepResult>;
|
||||
onTeachWorking?(): void;
|
||||
|
||||
// ── Lock (async) ───────────────────────────────────────────────────
|
||||
|
||||
/** At most one session uses CU at a time. Awaited by `bindSessionContext`
|
||||
* before dispatch. Undefined → no lock gating (proceed). */
|
||||
checkCuLock?(): Promise<{ holder: string | undefined; isSelf: boolean }>;
|
||||
/** Take the lock. Called when `checkCuLock` returned `holder: undefined`
|
||||
* on a non-deferring tool. Host emits enter-CU signals here. */
|
||||
acquireCuLock?(): Promise<void>;
|
||||
/** Host-specific lock-held error text. Default is the package's generic
|
||||
* message. The CLI host includes the holder session-ID prefix. */
|
||||
formatLockHeldMessage?(holder: string): string;
|
||||
|
||||
/** User-abort signal. Passed through to `ComputerUseOverrides.isAborted`
|
||||
* for the mid-loop checks in handleComputerBatch / handleType. See that
|
||||
* field for semantics. */
|
||||
isAborted?(): boolean;
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Per-call overrides (mirror of PermissionOverrides, types.ts:97-102)
|
||||
// ----------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Built FRESH on every tool call by `bindSessionContext` from
|
||||
* `ComputerUseSessionContext` getters. This is what lets a singleton MCP
|
||||
* server carry per-session state — the state lives on the host's session
|
||||
* store, not the server.
|
||||
*/
|
||||
export interface ComputerUseOverrides {
|
||||
allowedApps: AppGrant[];
|
||||
grantFlags: CuGrantFlags;
|
||||
coordinateMode: CoordinateMode;
|
||||
|
||||
/**
|
||||
* User-configured auto-deny list (Settings → Desktop app → Computer Use).
|
||||
* Bundle IDs
|
||||
* here are stripped from request_access BEFORE the approval dialog — they
|
||||
* never reach the user for approval regardless of tier. The response tells
|
||||
* the agent to ask the user to remove the app from their deny list in
|
||||
* Settings if access is genuinely needed.
|
||||
*
|
||||
* Per-USER, persists across restarts (read from appPreferences per call,
|
||||
* not session state). Contrast with `allowedApps` which is per-session.
|
||||
* Empty array = no user-configured denies (the default).
|
||||
*/
|
||||
userDeniedBundleIds: readonly string[];
|
||||
|
||||
/**
|
||||
* Display CU operates on; read fresh per call. `scaleCoord` uses the
|
||||
* `originX/Y` snapshotted in `lastScreenshot`, so mid-session switches
|
||||
* only affect the NEXT screenshot/prepare call.
|
||||
*/
|
||||
selectedDisplayId?: number;
|
||||
|
||||
/**
|
||||
* The `request_access` tool handler calls this and awaits. The wrapper
|
||||
* closure in serverDef.ts (mirroring InternalMcpServerManager.ts:131-177)
|
||||
* routes through `handleToolPermission` → IPC → renderer ChicagoApproval.
|
||||
* When it resolves, the wrapper side-effectfully mutates
|
||||
* `InternalServerContext.cuAllowedApps` BEFORE returning here.
|
||||
*
|
||||
* Undefined when the session wasn't wired with a permission handler (e.g.
|
||||
* a future headless mode). `request_access` returns a tool error in that case.
|
||||
*/
|
||||
onPermissionRequest?: (req: CuPermissionRequest) => Promise<CuPermissionResponse>;
|
||||
|
||||
/**
|
||||
* For the pixel-validation staleness guard. The model's-last-screenshot,
|
||||
* stashed by serverDef.ts after each `screenshot` tool call. Undefined on
|
||||
* cold start → pixel validation skipped (click proceeds).
|
||||
*/
|
||||
lastScreenshot?: ScreenshotResult;
|
||||
|
||||
/**
|
||||
* Fired after every `prepareForAction` with the bundle IDs it just hid.
|
||||
* The wrapper closure in serverDef.ts accumulates these into
|
||||
* `Session.cuHiddenDuringTurn` via a write-through callback (same pattern
|
||||
* as `onCuPermissionUpdated`). At turn end (`sdkMessage.type === "result"`),
|
||||
* if the `chicagoAutoUnhide` setting is on, everything in the set is
|
||||
* unhidden. Set is cleared regardless of the setting so it doesn't leak
|
||||
* across turns.
|
||||
*
|
||||
* Undefined when the session wasn't wired with a tracker — unhide just
|
||||
* doesn't happen.
|
||||
*/
|
||||
onAppsHidden?: (bundleIds: string[]) => void;
|
||||
|
||||
/**
|
||||
* Reads the clipboardGuard stash from session state. `undefined` means no
|
||||
* stash is held — `syncClipboardStash` stashes on first entry to click-tier
|
||||
* and clears on restore. Sibling of the `cuHiddenDuringTurn` getter pattern
|
||||
* — state lives on the host's session, not module-level here.
|
||||
*/
|
||||
getClipboardStash?: () => string | undefined;
|
||||
|
||||
/**
|
||||
* Writes the clipboardGuard stash to session state. `undefined` clears.
|
||||
* Sibling of `onAppsHidden` — the wrapper closure writes through to
|
||||
* `Session.cuClipboardStash`. At turn end the host reads + clears it
|
||||
* directly and restores via Electron's `clipboard.writeText` (no nest-only
|
||||
* import surface).
|
||||
*/
|
||||
onClipboardStashChanged?: (stash: string | undefined) => void;
|
||||
|
||||
/**
|
||||
* Write the resolver's picked display back to session so teach overlay
|
||||
* positioning and subsequent non-resolver calls use the same display.
|
||||
* Fired by `handleScreenshot` in the atomic `autoTargetDisplay` path when
|
||||
* `resolvePrepareCapture`'s pick differs from `selectedDisplayId`.
|
||||
* Fire-and-forget.
|
||||
*/
|
||||
onResolvedDisplayUpdated?: (displayId: number) => void;
|
||||
|
||||
/**
|
||||
* Set when the model explicitly picked a display via `switch_display`.
|
||||
* When true, `handleScreenshot` passes `autoResolve: false` so the Swift
|
||||
* resolver honors `selectedDisplayId` directly (straight cuDisplayInfo
|
||||
* passthrough) instead of running the co-location/chase chain. The
|
||||
* resolver's Step 2 ("host + allowed co-located → host") otherwise
|
||||
* overrides any `selectedDisplayId` whenever an allowed app shares the
|
||||
* host's monitor.
|
||||
*/
|
||||
displayPinnedByModel?: boolean;
|
||||
|
||||
/**
|
||||
* Write the model's explicit display pick to session. `displayId:
|
||||
* undefined` clears both `selectedDisplayId` and the pin (back to auto).
|
||||
* Sibling of `onResolvedDisplayUpdated` but also sets the pin flag —
|
||||
* the two are semantically distinct (resolver-picked vs model-picked).
|
||||
*/
|
||||
onDisplayPinned?: (displayId: number | undefined) => void;
|
||||
|
||||
/**
|
||||
* Sorted comma-joined bundle-ID set the display was last auto-resolved
|
||||
* for. `handleScreenshot` compares this to the current allowed set and
|
||||
* only passes `autoResolve: true` when they differ — so the resolver
|
||||
* doesn't yank the display on every screenshot, only when the app set
|
||||
* has changed since the last resolve (or manual switch).
|
||||
*/
|
||||
displayResolvedForApps?: string;
|
||||
|
||||
/**
|
||||
* Records which app set the current display selection was made for. Fired
|
||||
* alongside `onResolvedDisplayUpdated` when the resolver picks, so the next
|
||||
* screenshot sees a matching set and skips auto-resolve.
|
||||
*/
|
||||
onDisplayResolvedForApps?: (sortedBundleIdsKey: string) => void;
|
||||
|
||||
/**
|
||||
* Global CU lock — at most one session actively uses CU at a time. Checked
|
||||
* in `handleToolCall` after kill-switch/TCC, before dispatch. Every CU tool
|
||||
* including `request_access` goes through it.
|
||||
*
|
||||
* - `holder === undefined` → lock is free, safe to acquire
|
||||
* - `isSelf === true` → this session already holds it (no-op, proceed)
|
||||
* - `holder !== undefined && !isSelf` → blocked, return tool error
|
||||
*
|
||||
* `undefined` callback → lock system not wired (e.g. CCD). Proceed without
|
||||
* gating — absence of the mechanism ≠ locked out.
|
||||
*
|
||||
* The host manages release (on session idle/stop/archive) — this package
|
||||
* never releases.
|
||||
*/
|
||||
checkCuLock?: () => { holder: string | undefined; isSelf: boolean };
|
||||
|
||||
/**
|
||||
* Take the lock for this session. `handleToolCall` calls this exactly once
|
||||
* per turn, on the FIRST CU tool call when `checkCuLock().holder` is
|
||||
* undefined. No-op if already held (defensive — the check should have
|
||||
* short-circuited). Host emits an event the overlay listens to.
|
||||
*/
|
||||
acquireCuLock?: () => void;
|
||||
|
||||
/**
|
||||
* User-abort signal. Checked mid-iteration inside `handleComputerBatch`
|
||||
* and `handleType`'s grapheme loop so an in-flight batch/type stops
|
||||
* promptly on overlay Stop instead of running to completion after the
|
||||
* host has already abandoned the tool result.
|
||||
*
|
||||
* Undefined → never aborts (e.g. unwired host). Live per-check read —
|
||||
* same lazy-getter pattern as `checkCuLock`.
|
||||
*/
|
||||
isAborted?: () => boolean;
|
||||
|
||||
// ── Teach mode ───────────────────────────────────────────────────────
|
||||
// Wired only when the host's teachModeEnabled gate is on. All five
|
||||
// undefined → `request_teach_access` / `teach_step` return tool errors
|
||||
// and teach mode is effectively off.
|
||||
|
||||
/**
|
||||
* Sibling of `onPermissionRequest`. Same blocking-await-on-renderer-dialog
|
||||
* semantics, but routes to ComputerUseTeachApproval.tsx (which explains
|
||||
* the window-hides-during-guide behavior) instead of ComputerUseApproval.
|
||||
* The wrapper closure in serverDef.ts writes grants through to session state
|
||||
* via `onCuPermissionUpdated` exactly as `onPermissionRequest` does.
|
||||
*/
|
||||
onTeachPermissionRequest?: (
|
||||
req: CuTeachPermissionRequest,
|
||||
) => Promise<CuPermissionResponse>;
|
||||
|
||||
/**
|
||||
* Called by `handleRequestTeachAccess` after the user approves and at least
|
||||
* one app was granted. Host sets `session.teachModeActive = true`, emits
|
||||
* `teachModeChanged` → teach controller hides the main window and shows the
|
||||
* fullscreen overlay. Cleared by the host on turn end (`transitionTo("idle")`)
|
||||
* alongside the CU lock release.
|
||||
*/
|
||||
onTeachModeActivated?: () => void;
|
||||
|
||||
/**
|
||||
* Read by `handleRequestAccess` and `handleRequestTeachAccess` to
|
||||
* short-circuit with a clear tool error when teach mode is active. The
|
||||
* main window is hidden during teach mode, so permission dialogs render
|
||||
* invisibly and handleToolPermission blocks forever on an invisible
|
||||
* prompt. Better to tell the model to exit teach mode first. Getter
|
||||
* (not a boolean field) because teach mode state lives on the session,
|
||||
* not on this per-call overrides object.
|
||||
*/
|
||||
getTeachModeActive?: () => boolean;
|
||||
|
||||
/**
|
||||
* Called by `handleTeachStep` with the scaled anchor + text. Host stores
|
||||
* the resolver, emits `teachStepRequested` → teach controller pushes the
|
||||
* payload to the overlay → user reads, clicks Next → IPC → host calls the
|
||||
* stored resolver → this promise resolves. `{action: "exit"}` when the user
|
||||
* clicks Exit (or the turn is interrupted) — `handleTeachStep` short-circuits
|
||||
* without executing actions.
|
||||
*
|
||||
* Same blocking-promise pattern as `onPermissionRequest`, but resolved by
|
||||
* the teach overlay's own preload (not the main renderer's tool-approval UI).
|
||||
*/
|
||||
onTeachStep?: (req: TeachStepRequest) => Promise<TeachStepResult>;
|
||||
|
||||
/**
|
||||
* Called immediately after `onTeachStep` resolves with "next", before
|
||||
* action dispatch begins. Host emits `teachStepWorking` → overlay flips to
|
||||
* the spinner state (Next button gone, Exit stays, "Working…" + rotating
|
||||
* notch). The next `onTeachStep` call replaces the spinner with the new
|
||||
* tooltip content.
|
||||
*/
|
||||
onTeachWorking?: () => void;
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Teach mode (guided-tour tooltips with Next-button action execution)
|
||||
// ----------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Payload the host pushes to the teach overlay BrowserWindow. Built by
|
||||
* `handleTeachStep` in toolCalls.ts from the model's `teach_step` args.
|
||||
*
|
||||
* `anchorLogical` here is POST-`scaleCoord` — **full-display** logical
|
||||
* macOS points (origin = monitor top-left, menu bar included, since
|
||||
* cuDisplayInfo returns CGDisplayBounds). The overlay window is positioned
|
||||
* at `workArea.{x,y}` (excludes menu bar/Dock), so `updateTeachStep` in
|
||||
* teach/window.ts subtracts the workArea offset before IPC so the HTML's
|
||||
* CSS coords match.
|
||||
*/
|
||||
export interface TeachStepRequest {
|
||||
explanation: string;
|
||||
nextPreview: string;
|
||||
/** Full-display logical points. Undefined → overlay centers the tooltip, hides the arrow. */
|
||||
anchorLogical?: { x: number; y: number };
|
||||
}
|
||||
|
||||
export type TeachStepResult = { action: "next" } | { action: "exit" };
|
||||
|
||||
/**
|
||||
* Payload for the renderer's ComputerUseTeachApproval dialog. Rides through
|
||||
* `ToolPermissionRequest.input: unknown` same as `CuPermissionRequest`.
|
||||
* Separate type (not a flag on `CuPermissionRequest`) so the two approval
|
||||
* components can narrow independently and the teach dialog is free to drop
|
||||
* fields it doesn't render (no grant-flag checkboxes in teach mode).
|
||||
*/
|
||||
export interface CuTeachPermissionRequest {
|
||||
requestId: string;
|
||||
/** Model-provided reason. Shown in the dialog headline ("guide you through {reason}"). */
|
||||
reason: string;
|
||||
apps: ResolvedAppRequest[];
|
||||
screenshotFiltering: "native" | "none";
|
||||
/** Present only when TCC is ungranted — same semantics as `CuPermissionRequest.tccState`. */
|
||||
tccState?: {
|
||||
accessibility: boolean;
|
||||
screenRecording: boolean;
|
||||
};
|
||||
willHide?: Array<{ bundleId: string; displayName: string }>;
|
||||
/** Same semantics as `CuPermissionRequest.autoUnhideEnabled`. */
|
||||
autoUnhideEnabled?: boolean;
|
||||
}
|
||||
23
extracted-source/node_modules/@ant/computer-use-swift/js/index.js
generated
vendored
23
extracted-source/node_modules/@ant/computer-use-swift/js/index.js
generated
vendored
@@ -1,23 +0,0 @@
|
||||
const path = require("path");
|
||||
|
||||
if (process.platform !== "darwin") {
|
||||
throw new Error("@ant/computer-use-swift is only available on macOS");
|
||||
}
|
||||
|
||||
// COMPUTER_USE_SWIFT_NODE_PATH: escape hatch for bundlers. Bun's --compile
|
||||
// embeds the .node as an asset, not in a node_modules tree — __dirname is the
|
||||
// exe dir and ../prebuilds/ doesn't exist. The consuming build bakes this var
|
||||
// to the embedded asset's path. Unset → normal node_modules layout.
|
||||
//
|
||||
// Four methods use `Task { @MainActor in ... }` (captureExcluding,
|
||||
// captureRegion, apps.listInstalled, resolvePrepareCapture) which enqueue
|
||||
// onto DispatchQueue.main. Electron drains that queue via CFRunLoop; libuv
|
||||
// (Node/bun) does not — the promises hang. Consumers running under libuv
|
||||
// must pump `_drainMainRunLoop` via setInterval while those promises are
|
||||
// pending. Consumers under Electron don't need to (CFRunLoop drains
|
||||
// automatically).
|
||||
const native = require(
|
||||
process.env.COMPUTER_USE_SWIFT_NODE_PATH ??
|
||||
path.resolve(__dirname, "../prebuilds/computer_use.node"),
|
||||
);
|
||||
module.exports = native.computerUse;
|
||||
163
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/AWS_restJson1.mjs
generated
vendored
163
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/AWS_restJson1.mjs
generated
vendored
@@ -1,163 +0,0 @@
|
||||
// Copied from https://github.com/aws/aws-sdk-js-v3/blob/bee66fbd2a519a16b57c787b2689af857af720af/clients/client-bedrock-runtime/src/protocols/Aws_restJson1.ts
|
||||
// Modified to remove unnecessary code (we only need to call `de_ResponseStream`) and to adjust imports.
|
||||
import { collectBody, decorateServiceException as __decorateServiceException, expectInt32 as __expectInt32, expectString as __expectString, map, take, } from '@smithy/smithy-client';
|
||||
import { InternalServerException, ModelStreamErrorException, ThrottlingException, ValidationException, } from '@aws-sdk/client-bedrock-runtime';
|
||||
/**
|
||||
* deserializeAws_restJson1InternalServerExceptionRes
|
||||
*/
|
||||
const de_InternalServerExceptionRes = async (parsedOutput, context) => {
|
||||
const contents = map({});
|
||||
const data = parsedOutput.body;
|
||||
const doc = take(data, {
|
||||
message: __expectString,
|
||||
});
|
||||
Object.assign(contents, doc);
|
||||
const exception = new InternalServerException({
|
||||
$metadata: deserializeMetadata(parsedOutput),
|
||||
...contents,
|
||||
});
|
||||
return __decorateServiceException(exception, parsedOutput.body);
|
||||
};
|
||||
/**
|
||||
* deserializeAws_restJson1ModelStreamErrorExceptionRes
|
||||
*/
|
||||
const de_ModelStreamErrorExceptionRes = async (parsedOutput, context) => {
|
||||
const contents = map({});
|
||||
const data = parsedOutput.body;
|
||||
const doc = take(data, {
|
||||
message: __expectString,
|
||||
originalMessage: __expectString,
|
||||
originalStatusCode: __expectInt32,
|
||||
});
|
||||
Object.assign(contents, doc);
|
||||
const exception = new ModelStreamErrorException({
|
||||
$metadata: deserializeMetadata(parsedOutput),
|
||||
...contents,
|
||||
});
|
||||
return __decorateServiceException(exception, parsedOutput.body);
|
||||
};
|
||||
/**
|
||||
* deserializeAws_restJson1ThrottlingExceptionRes
|
||||
*/
|
||||
const de_ThrottlingExceptionRes = async (parsedOutput, context) => {
|
||||
const contents = map({});
|
||||
const data = parsedOutput.body;
|
||||
const doc = take(data, {
|
||||
message: __expectString,
|
||||
});
|
||||
Object.assign(contents, doc);
|
||||
const exception = new ThrottlingException({
|
||||
$metadata: deserializeMetadata(parsedOutput),
|
||||
...contents,
|
||||
});
|
||||
return __decorateServiceException(exception, parsedOutput.body);
|
||||
};
|
||||
/**
|
||||
* deserializeAws_restJson1ValidationExceptionRes
|
||||
*/
|
||||
const de_ValidationExceptionRes = async (parsedOutput, context) => {
|
||||
const contents = map({});
|
||||
const data = parsedOutput.body;
|
||||
const doc = take(data, {
|
||||
message: __expectString,
|
||||
});
|
||||
Object.assign(contents, doc);
|
||||
const exception = new ValidationException({
|
||||
$metadata: deserializeMetadata(parsedOutput),
|
||||
...contents,
|
||||
});
|
||||
return __decorateServiceException(exception, parsedOutput.body);
|
||||
};
|
||||
/**
|
||||
* deserializeAws_restJson1ResponseStream
|
||||
*/
|
||||
export const de_ResponseStream = (output, context) => {
|
||||
return context.eventStreamMarshaller.deserialize(output, async (event) => {
|
||||
if (event['chunk'] != null) {
|
||||
return {
|
||||
chunk: await de_PayloadPart_event(event['chunk'], context),
|
||||
};
|
||||
}
|
||||
if (event['internalServerException'] != null) {
|
||||
return {
|
||||
internalServerException: await de_InternalServerException_event(event['internalServerException'], context),
|
||||
};
|
||||
}
|
||||
if (event['modelStreamErrorException'] != null) {
|
||||
return {
|
||||
modelStreamErrorException: await de_ModelStreamErrorException_event(event['modelStreamErrorException'], context),
|
||||
};
|
||||
}
|
||||
if (event['validationException'] != null) {
|
||||
return {
|
||||
validationException: await de_ValidationException_event(event['validationException'], context),
|
||||
};
|
||||
}
|
||||
if (event['throttlingException'] != null) {
|
||||
return {
|
||||
throttlingException: await de_ThrottlingException_event(event['throttlingException'], context),
|
||||
};
|
||||
}
|
||||
return { $unknown: output };
|
||||
});
|
||||
};
|
||||
const de_InternalServerException_event = async (output, context) => {
|
||||
const parsedOutput = {
|
||||
...output,
|
||||
body: await parseBody(output.body, context),
|
||||
};
|
||||
return de_InternalServerExceptionRes(parsedOutput, context);
|
||||
};
|
||||
const de_ModelStreamErrorException_event = async (output, context) => {
|
||||
const parsedOutput = {
|
||||
...output,
|
||||
body: await parseBody(output.body, context),
|
||||
};
|
||||
return de_ModelStreamErrorExceptionRes(parsedOutput, context);
|
||||
};
|
||||
const de_PayloadPart_event = async (output, context) => {
|
||||
const contents = {};
|
||||
const data = await parseBody(output.body, context);
|
||||
Object.assign(contents, de_PayloadPart(data, context));
|
||||
return contents;
|
||||
};
|
||||
const de_ThrottlingException_event = async (output, context) => {
|
||||
const parsedOutput = {
|
||||
...output,
|
||||
body: await parseBody(output.body, context),
|
||||
};
|
||||
return de_ThrottlingExceptionRes(parsedOutput, context);
|
||||
};
|
||||
const de_ValidationException_event = async (output, context) => {
|
||||
const parsedOutput = {
|
||||
...output,
|
||||
body: await parseBody(output.body, context),
|
||||
};
|
||||
return de_ValidationExceptionRes(parsedOutput, context);
|
||||
};
|
||||
/**
|
||||
* deserializeAws_restJson1PayloadPart
|
||||
*/
|
||||
const de_PayloadPart = (output, context) => {
|
||||
return take(output, {
|
||||
bytes: context.base64Decoder,
|
||||
});
|
||||
};
|
||||
const deserializeMetadata = (output) => ({
|
||||
httpStatusCode: output.statusCode,
|
||||
requestId: output.headers['x-amzn-requestid'] ??
|
||||
output.headers['x-amzn-request-id'] ??
|
||||
output.headers['x-amz-request-id'] ??
|
||||
'',
|
||||
extendedRequestId: output.headers['x-amz-id-2'] ?? '',
|
||||
cfId: output.headers['x-amz-cf-id'] ?? '',
|
||||
});
|
||||
// Encode Uint8Array data into string with utf-8.
|
||||
const collectBodyString = (streamBody, context) => collectBody(streamBody, context).then((body) => context.utf8Encoder(body));
|
||||
const parseBody = (streamBody, context) => collectBodyString(streamBody, context).then((encoded) => {
|
||||
if (encoded.length) {
|
||||
return JSON.parse(encoded);
|
||||
}
|
||||
return {};
|
||||
});
|
||||
//# sourceMappingURL=AWS_restJson1.mjs.map
|
||||
124
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/client.mjs
generated
vendored
124
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/client.mjs
generated
vendored
@@ -1,124 +0,0 @@
|
||||
import { BaseAnthropic } from '@anthropic-ai/sdk/client';
|
||||
import * as Resources from '@anthropic-ai/sdk/resources/index';
|
||||
import { getAuthHeaders } from "./core/auth.mjs";
|
||||
import { Stream } from "./core/streaming.mjs";
|
||||
import { readEnv } from "./internal/utils/env.mjs";
|
||||
import { isObj } from "./internal/utils/values.mjs";
|
||||
import { buildHeaders } from "./internal/headers.mjs";
|
||||
import { path } from "./internal/utils/path.mjs";
|
||||
export { BaseAnthropic } from '@anthropic-ai/sdk/client';
|
||||
const DEFAULT_VERSION = 'bedrock-2023-05-31';
|
||||
const MODEL_ENDPOINTS = new Set(['/v1/complete', '/v1/messages', '/v1/messages?beta=true']);
|
||||
/** API Client for interfacing with the Anthropic Bedrock API. */
|
||||
export class AnthropicBedrock extends BaseAnthropic {
|
||||
/**
|
||||
* API Client for interfacing with the Anthropic Bedrock API.
|
||||
*
|
||||
* @param {string | null | undefined} [opts.awsSecretKey]
|
||||
* @param {string | null | undefined} [opts.awsAccessKey]
|
||||
* @param {string | undefined} [opts.awsRegion=process.env['AWS_REGION'] ?? us-east-1]
|
||||
* @param {string | null | undefined} [opts.awsSessionToken]
|
||||
* @param {(() => Promise<AwsCredentialIdentityProvider>) | null} [opts.providerChainResolver] - Custom provider chain resolver for AWS credentials. Useful for non-Node environments.
|
||||
* @param {string} [opts.baseURL=process.env['ANTHROPIC_BEDROCK_BASE_URL'] ?? https://bedrock-runtime.${this.awsRegion}.amazonaws.com] - Override the default base URL for the API.
|
||||
* @param {number} [opts.timeout=10 minutes] - The maximum amount of time (in milliseconds) the client will wait for a response before timing out.
|
||||
* @param {MergedRequestInit} [opts.fetchOptions] - Additional `RequestInit` options to be passed to `fetch` calls.
|
||||
* @param {Fetch} [opts.fetch] - Specify a custom `fetch` function implementation.
|
||||
* @param {number} [opts.maxRetries=2] - The maximum number of times the client will retry a request.
|
||||
* @param {HeadersLike} opts.defaultHeaders - Default headers to include with every request to the API.
|
||||
* @param {Record<string, string | undefined>} opts.defaultQuery - Default query parameters to include with every request to the API.
|
||||
* @param {boolean} [opts.dangerouslyAllowBrowser=false] - By default, client-side use of this library is not allowed, as it risks exposing your secret API credentials to attackers.
|
||||
* @param {boolean} [opts.skipAuth=false] - Skip authentication for this request. This is useful if you have an internal proxy that handles authentication for you.
|
||||
*/
|
||||
constructor({ awsRegion = readEnv('AWS_REGION') ?? 'us-east-1', baseURL = readEnv('ANTHROPIC_BEDROCK_BASE_URL') ?? `https://bedrock-runtime.${awsRegion}.amazonaws.com`, awsSecretKey = null, awsAccessKey = null, awsSessionToken = null, providerChainResolver = null, ...opts } = {}) {
|
||||
super({
|
||||
baseURL,
|
||||
...opts,
|
||||
});
|
||||
this.skipAuth = false;
|
||||
this.messages = makeMessagesResource(this);
|
||||
this.completions = new Resources.Completions(this);
|
||||
this.beta = makeBetaResource(this);
|
||||
this.awsSecretKey = awsSecretKey;
|
||||
this.awsAccessKey = awsAccessKey;
|
||||
this.awsRegion = awsRegion;
|
||||
this.awsSessionToken = awsSessionToken;
|
||||
this.skipAuth = opts.skipAuth ?? false;
|
||||
this.providerChainResolver = providerChainResolver;
|
||||
}
|
||||
validateHeaders() {
|
||||
// auth validation is handled in prepareRequest since it needs to be async
|
||||
}
|
||||
async prepareRequest(request, { url, options }) {
|
||||
if (this.skipAuth) {
|
||||
return;
|
||||
}
|
||||
const regionName = this.awsRegion;
|
||||
if (!regionName) {
|
||||
throw new Error('Expected `awsRegion` option to be passed to the client or the `AWS_REGION` environment variable to be present');
|
||||
}
|
||||
const headers = await getAuthHeaders(request, {
|
||||
url,
|
||||
regionName,
|
||||
awsAccessKey: this.awsAccessKey,
|
||||
awsSecretKey: this.awsSecretKey,
|
||||
awsSessionToken: this.awsSessionToken,
|
||||
fetchOptions: this.fetchOptions,
|
||||
providerChainResolver: this.providerChainResolver,
|
||||
});
|
||||
request.headers = buildHeaders([headers, request.headers]).values;
|
||||
}
|
||||
async buildRequest(options) {
|
||||
options.__streamClass = Stream;
|
||||
if (isObj(options.body)) {
|
||||
// create a shallow copy of the request body so that code that mutates it later
|
||||
// doesn't mutate the original user-provided object
|
||||
options.body = { ...options.body };
|
||||
}
|
||||
if (isObj(options.body)) {
|
||||
if (!options.body['anthropic_version']) {
|
||||
options.body['anthropic_version'] = DEFAULT_VERSION;
|
||||
}
|
||||
if (options.headers && !options.body['anthropic_beta']) {
|
||||
const betas = buildHeaders([options.headers]).values.get('anthropic-beta');
|
||||
if (betas != null) {
|
||||
options.body['anthropic_beta'] = betas.split(',');
|
||||
}
|
||||
}
|
||||
}
|
||||
if (MODEL_ENDPOINTS.has(options.path) && options.method === 'post') {
|
||||
if (!isObj(options.body)) {
|
||||
throw new Error('Expected request body to be an object for post /v1/messages');
|
||||
}
|
||||
const model = options.body['model'];
|
||||
options.body['model'] = undefined;
|
||||
const stream = options.body['stream'];
|
||||
options.body['stream'] = undefined;
|
||||
if (stream) {
|
||||
options.path = path `/model/${model}/invoke-with-response-stream`;
|
||||
}
|
||||
else {
|
||||
options.path = path `/model/${model}/invoke`;
|
||||
}
|
||||
}
|
||||
return super.buildRequest(options);
|
||||
}
|
||||
}
|
||||
function makeMessagesResource(client) {
|
||||
const resource = new Resources.Messages(client);
|
||||
// @ts-expect-error we're deleting non-optional properties
|
||||
delete resource.batches;
|
||||
// @ts-expect-error we're deleting non-optional properties
|
||||
delete resource.countTokens;
|
||||
return resource;
|
||||
}
|
||||
function makeBetaResource(client) {
|
||||
const resource = new Resources.Beta(client);
|
||||
// @ts-expect-error we're deleting non-optional properties
|
||||
delete resource.promptCaching;
|
||||
// @ts-expect-error we're deleting non-optional properties
|
||||
delete resource.messages.batches;
|
||||
// @ts-expect-error we're deleting non-optional properties
|
||||
delete resource.messages.countTokens;
|
||||
return resource;
|
||||
}
|
||||
//# sourceMappingURL=client.mjs.map
|
||||
82
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/core/auth.mjs
generated
vendored
82
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/core/auth.mjs
generated
vendored
@@ -1,82 +0,0 @@
|
||||
import { Sha256 } from '@aws-crypto/sha256-js';
|
||||
import { FetchHttpHandler } from '@smithy/fetch-http-handler';
|
||||
import { HttpRequest } from '@smithy/protocol-http';
|
||||
import { SignatureV4 } from '@smithy/signature-v4';
|
||||
import assert from 'assert';
|
||||
const DEFAULT_PROVIDER_CHAIN_RESOLVER = () => import('@aws-sdk/credential-providers').then(({ fromNodeProviderChain }) => fromNodeProviderChain({
|
||||
clientConfig: {
|
||||
requestHandler: new FetchHttpHandler({
|
||||
requestInit: (httpRequest) => {
|
||||
return {
|
||||
...httpRequest,
|
||||
};
|
||||
},
|
||||
}),
|
||||
},
|
||||
}))
|
||||
.catch((error) => {
|
||||
throw new Error(`Failed to import '@aws-sdk/credential-providers'.` +
|
||||
`You can provide a custom \`providerChainResolver\` in the client options if your runtime does not have access to '@aws-sdk/credential-providers': ` +
|
||||
`\`new AnthropicBedrock({ providerChainResolver })\` ` +
|
||||
`Original error: ${error.message}`);
|
||||
});
|
||||
export const getAuthHeaders = async (req, props) => {
|
||||
assert(req.method, 'Expected request method property to be set');
|
||||
const providerChain = await (props.providerChainResolver ?
|
||||
props.providerChainResolver()
|
||||
: DEFAULT_PROVIDER_CHAIN_RESOLVER());
|
||||
const credentials = await withTempEnv(() => {
|
||||
// Temporarily set the appropriate environment variables if we've been
|
||||
// explicitly given credentials so that the credentials provider can
|
||||
// resolve them.
|
||||
//
|
||||
// Note: the environment provider is only not run first if the `AWS_PROFILE`
|
||||
// environment variable is set.
|
||||
// https://github.com/aws/aws-sdk-js-v3/blob/44a18a34b2c93feccdfcd162928d13e6dbdcaf30/packages/credential-provider-node/src/defaultProvider.ts#L49
|
||||
if (props.awsAccessKey) {
|
||||
process.env['AWS_ACCESS_KEY_ID'] = props.awsAccessKey;
|
||||
}
|
||||
if (props.awsSecretKey) {
|
||||
process.env['AWS_SECRET_ACCESS_KEY'] = props.awsSecretKey;
|
||||
}
|
||||
if (props.awsSessionToken) {
|
||||
process.env['AWS_SESSION_TOKEN'] = props.awsSessionToken;
|
||||
}
|
||||
}, () => providerChain());
|
||||
const signer = new SignatureV4({
|
||||
service: 'bedrock',
|
||||
region: props.regionName,
|
||||
credentials,
|
||||
sha256: Sha256,
|
||||
});
|
||||
const url = new URL(props.url);
|
||||
const headers = !req.headers ? {}
|
||||
: Symbol.iterator in req.headers ?
|
||||
Object.fromEntries(Array.from(req.headers).map((header) => [...header]))
|
||||
: { ...req.headers };
|
||||
// The connection header may be stripped by a proxy somewhere, so the receiver
|
||||
// of this message may not see this header, so we remove it from the set of headers
|
||||
// that are signed.
|
||||
delete headers['connection'];
|
||||
headers['host'] = url.hostname;
|
||||
const request = new HttpRequest({
|
||||
method: req.method.toUpperCase(),
|
||||
protocol: url.protocol,
|
||||
path: url.pathname,
|
||||
headers,
|
||||
body: req.body,
|
||||
});
|
||||
const signed = await signer.sign(request);
|
||||
return signed.headers;
|
||||
};
|
||||
const withTempEnv = async (updateEnv, fn) => {
|
||||
const previousEnv = { ...process.env };
|
||||
try {
|
||||
updateEnv();
|
||||
return await fn();
|
||||
}
|
||||
finally {
|
||||
process.env = previousEnv;
|
||||
}
|
||||
};
|
||||
//# sourceMappingURL=auth.mjs.map
|
||||
2
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/core/error.mjs
generated
vendored
2
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/core/error.mjs
generated
vendored
@@ -1,2 +0,0 @@
|
||||
export * from '@anthropic-ai/sdk/core/error';
|
||||
//# sourceMappingURL=error.mjs.map
|
||||
108
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/core/streaming.mjs
generated
vendored
108
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/core/streaming.mjs
generated
vendored
@@ -1,108 +0,0 @@
|
||||
import { EventStreamMarshaller } from '@smithy/eventstream-serde-node';
|
||||
import { fromBase64, toBase64 } from '@smithy/util-base64';
|
||||
import { streamCollector } from '@smithy/fetch-http-handler';
|
||||
import { Stream as CoreStream } from '@anthropic-ai/sdk/streaming';
|
||||
import { AnthropicError } from '@anthropic-ai/sdk/error';
|
||||
import { APIError } from '@anthropic-ai/sdk';
|
||||
import { de_ResponseStream } from "../AWS_restJson1.mjs";
|
||||
import { ReadableStreamToAsyncIterable } from "../internal/shims.mjs";
|
||||
import { safeJSON } from "../internal/utils/values.mjs";
|
||||
import { loggerFor } from "../internal/utils/log.mjs";
|
||||
export const toUtf8 = (input) => new TextDecoder('utf-8').decode(input);
|
||||
export const fromUtf8 = (input) => new TextEncoder().encode(input);
|
||||
// `de_ResponseStream` parses a Bedrock response stream and emits events as they are found.
|
||||
// It requires a "context" argument which has many fields, but for what we're using it for
|
||||
// it only needs this.
|
||||
export const getMinimalSerdeContext = () => {
|
||||
const marshaller = new EventStreamMarshaller({ utf8Encoder: toUtf8, utf8Decoder: fromUtf8 });
|
||||
return {
|
||||
base64Decoder: fromBase64,
|
||||
base64Encoder: toBase64,
|
||||
utf8Decoder: fromUtf8,
|
||||
utf8Encoder: toUtf8,
|
||||
eventStreamMarshaller: marshaller,
|
||||
streamCollector: streamCollector,
|
||||
};
|
||||
};
|
||||
export class Stream extends CoreStream {
|
||||
static fromSSEResponse(response, controller, client) {
|
||||
let consumed = false;
|
||||
const logger = client ? loggerFor(client) : console;
|
||||
async function* iterMessages() {
|
||||
if (!response.body) {
|
||||
controller.abort();
|
||||
throw new AnthropicError(`Attempted to iterate over a response with no body`);
|
||||
}
|
||||
const responseBodyIter = ReadableStreamToAsyncIterable(response.body);
|
||||
const eventStream = de_ResponseStream(responseBodyIter, getMinimalSerdeContext());
|
||||
for await (const event of eventStream) {
|
||||
if (event.chunk && event.chunk.bytes) {
|
||||
const s = toUtf8(event.chunk.bytes);
|
||||
yield { event: 'chunk', data: s, raw: [] };
|
||||
}
|
||||
else if (event.internalServerException) {
|
||||
yield { event: 'error', data: 'InternalServerException', raw: [] };
|
||||
}
|
||||
else if (event.modelStreamErrorException) {
|
||||
yield { event: 'error', data: 'ModelStreamErrorException', raw: [] };
|
||||
}
|
||||
else if (event.validationException) {
|
||||
yield { event: 'error', data: 'ValidationException', raw: [] };
|
||||
}
|
||||
else if (event.throttlingException) {
|
||||
yield { event: 'error', data: 'ThrottlingException', raw: [] };
|
||||
}
|
||||
}
|
||||
}
|
||||
// Note: this function is copied entirely from the core SDK
|
||||
async function* iterator() {
|
||||
if (consumed) {
|
||||
throw new Error('Cannot iterate over a consumed stream, use `.tee()` to split the stream.');
|
||||
}
|
||||
consumed = true;
|
||||
let done = false;
|
||||
try {
|
||||
for await (const sse of iterMessages()) {
|
||||
if (sse.event === 'chunk') {
|
||||
try {
|
||||
yield JSON.parse(sse.data);
|
||||
}
|
||||
catch (e) {
|
||||
logger.error(`Could not parse message into JSON:`, sse.data);
|
||||
logger.error(`From chunk:`, sse.raw);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
if (sse.event === 'error') {
|
||||
const errText = sse.data;
|
||||
const errJSON = safeJSON(errText);
|
||||
const errMessage = errJSON ? undefined : errText;
|
||||
throw APIError.generate(undefined, errJSON, errMessage, response.headers);
|
||||
}
|
||||
}
|
||||
done = true;
|
||||
}
|
||||
catch (e) {
|
||||
// If the user calls `stream.controller.abort()`, we should exit without throwing.
|
||||
if (isAbortError(e))
|
||||
return;
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
// If the user `break`s, abort the ongoing request.
|
||||
if (!done)
|
||||
controller.abort();
|
||||
}
|
||||
}
|
||||
return new Stream(iterator, controller);
|
||||
}
|
||||
}
|
||||
function isAbortError(err) {
|
||||
return (typeof err === 'object' &&
|
||||
err !== null &&
|
||||
// Spec-compliant fetch implementations
|
||||
(('name' in err && err.name === 'AbortError') ||
|
||||
// Expo fetch
|
||||
('message' in err && String(err.message).includes('FetchRequestCanceledException'))));
|
||||
}
|
||||
//# sourceMappingURL=streaming.mjs.map
|
||||
3
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/index.mjs
generated
vendored
3
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/index.mjs
generated
vendored
@@ -1,3 +0,0 @@
|
||||
export * from "./client.mjs";
|
||||
export { AnthropicBedrock as default } from "./client.mjs";
|
||||
//# sourceMappingURL=index.mjs.map
|
||||
74
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/headers.mjs
generated
vendored
74
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/headers.mjs
generated
vendored
@@ -1,74 +0,0 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { isReadonlyArray } from "./utils/values.mjs";
|
||||
const brand_privateNullableHeaders = Symbol.for('brand.privateNullableHeaders');
|
||||
function* iterateHeaders(headers) {
|
||||
if (!headers)
|
||||
return;
|
||||
if (brand_privateNullableHeaders in headers) {
|
||||
const { values, nulls } = headers;
|
||||
yield* values.entries();
|
||||
for (const name of nulls) {
|
||||
yield [name, null];
|
||||
}
|
||||
return;
|
||||
}
|
||||
let shouldClear = false;
|
||||
let iter;
|
||||
if (headers instanceof Headers) {
|
||||
iter = headers.entries();
|
||||
}
|
||||
else if (isReadonlyArray(headers)) {
|
||||
iter = headers;
|
||||
}
|
||||
else {
|
||||
shouldClear = true;
|
||||
iter = Object.entries(headers ?? {});
|
||||
}
|
||||
for (let row of iter) {
|
||||
const name = row[0];
|
||||
if (typeof name !== 'string')
|
||||
throw new TypeError('expected header name to be a string');
|
||||
const values = isReadonlyArray(row[1]) ? row[1] : [row[1]];
|
||||
let didClear = false;
|
||||
for (const value of values) {
|
||||
if (value === undefined)
|
||||
continue;
|
||||
// Objects keys always overwrite older headers, they never append.
|
||||
// Yield a null to clear the header before adding the new values.
|
||||
if (shouldClear && !didClear) {
|
||||
didClear = true;
|
||||
yield [name, null];
|
||||
}
|
||||
yield [name, value];
|
||||
}
|
||||
}
|
||||
}
|
||||
export const buildHeaders = (newHeaders) => {
|
||||
const targetHeaders = new Headers();
|
||||
const nullHeaders = new Set();
|
||||
for (const headers of newHeaders) {
|
||||
const seenHeaders = new Set();
|
||||
for (const [name, value] of iterateHeaders(headers)) {
|
||||
const lowerName = name.toLowerCase();
|
||||
if (!seenHeaders.has(lowerName)) {
|
||||
targetHeaders.delete(name);
|
||||
seenHeaders.add(lowerName);
|
||||
}
|
||||
if (value === null) {
|
||||
targetHeaders.delete(name);
|
||||
nullHeaders.add(lowerName);
|
||||
}
|
||||
else {
|
||||
targetHeaders.append(name, value);
|
||||
nullHeaders.delete(lowerName);
|
||||
}
|
||||
}
|
||||
}
|
||||
return { [brand_privateNullableHeaders]: true, values: targetHeaders, nulls: nullHeaders };
|
||||
};
|
||||
export const isEmptyHeaders = (headers) => {
|
||||
for (const _ of iterateHeaders(headers))
|
||||
return false;
|
||||
return true;
|
||||
};
|
||||
//# sourceMappingURL=headers.mjs.map
|
||||
85
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/shims.mjs
generated
vendored
85
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/shims.mjs
generated
vendored
@@ -1,85 +0,0 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
export function getDefaultFetch() {
|
||||
if (typeof fetch !== 'undefined') {
|
||||
return fetch;
|
||||
}
|
||||
throw new Error('`fetch` is not defined as a global; Either pass `fetch` to the client, `new Anthropic({ fetch })` or polyfill the global, `globalThis.fetch = fetch`');
|
||||
}
|
||||
export function makeReadableStream(...args) {
|
||||
const ReadableStream = globalThis.ReadableStream;
|
||||
if (typeof ReadableStream === 'undefined') {
|
||||
// Note: All of the platforms / runtimes we officially support already define
|
||||
// `ReadableStream` as a global, so this should only ever be hit on unsupported runtimes.
|
||||
throw new Error('`ReadableStream` is not defined as a global; You will need to polyfill it, `globalThis.ReadableStream = ReadableStream`');
|
||||
}
|
||||
return new ReadableStream(...args);
|
||||
}
|
||||
export function ReadableStreamFrom(iterable) {
|
||||
let iter = Symbol.asyncIterator in iterable ? iterable[Symbol.asyncIterator]() : iterable[Symbol.iterator]();
|
||||
return makeReadableStream({
|
||||
start() { },
|
||||
async pull(controller) {
|
||||
const { done, value } = await iter.next();
|
||||
if (done) {
|
||||
controller.close();
|
||||
}
|
||||
else {
|
||||
controller.enqueue(value);
|
||||
}
|
||||
},
|
||||
async cancel() {
|
||||
await iter.return?.();
|
||||
},
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Most browsers don't yet have async iterable support for ReadableStream,
|
||||
* and Node has a very different way of reading bytes from its "ReadableStream".
|
||||
*
|
||||
* This polyfill was pulled from https://github.com/MattiasBuelens/web-streams-polyfill/pull/122#issuecomment-1627354490
|
||||
*/
|
||||
export function ReadableStreamToAsyncIterable(stream) {
|
||||
if (stream[Symbol.asyncIterator])
|
||||
return stream;
|
||||
const reader = stream.getReader();
|
||||
return {
|
||||
async next() {
|
||||
try {
|
||||
const result = await reader.read();
|
||||
if (result?.done)
|
||||
reader.releaseLock(); // release lock when stream becomes closed
|
||||
return result;
|
||||
}
|
||||
catch (e) {
|
||||
reader.releaseLock(); // release lock when stream becomes errored
|
||||
throw e;
|
||||
}
|
||||
},
|
||||
async return() {
|
||||
const cancelPromise = reader.cancel();
|
||||
reader.releaseLock();
|
||||
await cancelPromise;
|
||||
return { done: true, value: undefined };
|
||||
},
|
||||
[Symbol.asyncIterator]() {
|
||||
return this;
|
||||
},
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Cancels a ReadableStream we don't need to consume.
|
||||
* See https://undici.nodejs.org/#/?id=garbage-collection
|
||||
*/
|
||||
export async function CancelReadableStream(stream) {
|
||||
if (stream === null || typeof stream !== 'object')
|
||||
return;
|
||||
if (stream[Symbol.asyncIterator]) {
|
||||
await stream[Symbol.asyncIterator]().return?.();
|
||||
return;
|
||||
}
|
||||
const reader = stream.getReader();
|
||||
const cancelPromise = reader.cancel();
|
||||
reader.releaseLock();
|
||||
await cancelPromise;
|
||||
}
|
||||
//# sourceMappingURL=shims.mjs.map
|
||||
18
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/utils/env.mjs
generated
vendored
18
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/utils/env.mjs
generated
vendored
@@ -1,18 +0,0 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
/**
|
||||
* Read an environment variable.
|
||||
*
|
||||
* Trims beginning and trailing whitespace.
|
||||
*
|
||||
* Will return undefined if the environment variable doesn't exist or cannot be accessed.
|
||||
*/
|
||||
export const readEnv = (env) => {
|
||||
if (typeof globalThis.process !== 'undefined') {
|
||||
return globalThis.process.env?.[env]?.trim() ?? undefined;
|
||||
}
|
||||
if (typeof globalThis.Deno !== 'undefined') {
|
||||
return globalThis.Deno.env?.get?.(env)?.trim();
|
||||
}
|
||||
return undefined;
|
||||
};
|
||||
//# sourceMappingURL=env.mjs.map
|
||||
80
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/utils/log.mjs
generated
vendored
80
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/utils/log.mjs
generated
vendored
@@ -1,80 +0,0 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { hasOwn } from "./values.mjs";
|
||||
const levelNumbers = {
|
||||
off: 0,
|
||||
error: 200,
|
||||
warn: 300,
|
||||
info: 400,
|
||||
debug: 500,
|
||||
};
|
||||
export const parseLogLevel = (maybeLevel, sourceName, client) => {
|
||||
if (!maybeLevel) {
|
||||
return undefined;
|
||||
}
|
||||
if (hasOwn(levelNumbers, maybeLevel)) {
|
||||
return maybeLevel;
|
||||
}
|
||||
loggerFor(client).warn(`${sourceName} was set to ${JSON.stringify(maybeLevel)}, expected one of ${JSON.stringify(Object.keys(levelNumbers))}`);
|
||||
return undefined;
|
||||
};
|
||||
function noop() { }
|
||||
function makeLogFn(fnLevel, logger, logLevel) {
|
||||
if (!logger || levelNumbers[fnLevel] > levelNumbers[logLevel]) {
|
||||
return noop;
|
||||
}
|
||||
else {
|
||||
// Don't wrap logger functions, we want the stacktrace intact!
|
||||
return logger[fnLevel].bind(logger);
|
||||
}
|
||||
}
|
||||
const noopLogger = {
|
||||
error: noop,
|
||||
warn: noop,
|
||||
info: noop,
|
||||
debug: noop,
|
||||
};
|
||||
let cachedLoggers = /* @__PURE__ */ new WeakMap();
|
||||
export function loggerFor(client) {
|
||||
const logger = client.logger;
|
||||
const logLevel = client.logLevel ?? 'off';
|
||||
if (!logger) {
|
||||
return noopLogger;
|
||||
}
|
||||
const cachedLogger = cachedLoggers.get(logger);
|
||||
if (cachedLogger && cachedLogger[0] === logLevel) {
|
||||
return cachedLogger[1];
|
||||
}
|
||||
const levelLogger = {
|
||||
error: makeLogFn('error', logger, logLevel),
|
||||
warn: makeLogFn('warn', logger, logLevel),
|
||||
info: makeLogFn('info', logger, logLevel),
|
||||
debug: makeLogFn('debug', logger, logLevel),
|
||||
};
|
||||
cachedLoggers.set(logger, [logLevel, levelLogger]);
|
||||
return levelLogger;
|
||||
}
|
||||
export const formatRequestDetails = (details) => {
|
||||
if (details.options) {
|
||||
details.options = { ...details.options };
|
||||
delete details.options['headers']; // redundant + leaks internals
|
||||
}
|
||||
if (details.headers) {
|
||||
details.headers = Object.fromEntries((details.headers instanceof Headers ? [...details.headers] : Object.entries(details.headers)).map(([name, value]) => [
|
||||
name,
|
||||
(name.toLowerCase() === 'x-api-key' ||
|
||||
name.toLowerCase() === 'authorization' ||
|
||||
name.toLowerCase() === 'cookie' ||
|
||||
name.toLowerCase() === 'set-cookie') ?
|
||||
'***'
|
||||
: value,
|
||||
]));
|
||||
}
|
||||
if ('retryOfRequestLogID' in details) {
|
||||
if (details.retryOfRequestLogID) {
|
||||
details.retryOf = details.retryOfRequestLogID;
|
||||
}
|
||||
delete details.retryOfRequestLogID;
|
||||
}
|
||||
return details;
|
||||
};
|
||||
//# sourceMappingURL=log.mjs.map
|
||||
74
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/utils/path.mjs
generated
vendored
74
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/utils/path.mjs
generated
vendored
@@ -1,74 +0,0 @@
|
||||
import { AnthropicError } from "../../core/error.mjs";
|
||||
/**
|
||||
* Percent-encode everything that isn't safe to have in a path without encoding safe chars.
|
||||
*
|
||||
* Taken from https://datatracker.ietf.org/doc/html/rfc3986#section-3.3:
|
||||
* > unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
|
||||
* > sub-delims = "!" / "$" / "&" / "'" / "(" / ")" / "*" / "+" / "," / ";" / "="
|
||||
* > pchar = unreserved / pct-encoded / sub-delims / ":" / "@"
|
||||
*/
|
||||
export function encodeURIPath(str) {
|
||||
return str.replace(/[^A-Za-z0-9\-._~!$&'()*+,;=:@]+/g, encodeURIComponent);
|
||||
}
|
||||
const EMPTY = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.create(null));
|
||||
export const createPathTagFunction = (pathEncoder = encodeURIPath) => function path(statics, ...params) {
|
||||
// If there are no params, no processing is needed.
|
||||
if (statics.length === 1)
|
||||
return statics[0];
|
||||
let postPath = false;
|
||||
const invalidSegments = [];
|
||||
const path = statics.reduce((previousValue, currentValue, index) => {
|
||||
if (/[?#]/.test(currentValue)) {
|
||||
postPath = true;
|
||||
}
|
||||
const value = params[index];
|
||||
let encoded = (postPath ? encodeURIComponent : pathEncoder)('' + value);
|
||||
if (index !== params.length &&
|
||||
(value == null ||
|
||||
(typeof value === 'object' &&
|
||||
// handle values from other realms
|
||||
value.toString ===
|
||||
Object.getPrototypeOf(Object.getPrototypeOf(value.hasOwnProperty ?? EMPTY) ?? EMPTY)
|
||||
?.toString))) {
|
||||
encoded = value + '';
|
||||
invalidSegments.push({
|
||||
start: previousValue.length + currentValue.length,
|
||||
length: encoded.length,
|
||||
error: `Value of type ${Object.prototype.toString
|
||||
.call(value)
|
||||
.slice(8, -1)} is not a valid path parameter`,
|
||||
});
|
||||
}
|
||||
return previousValue + currentValue + (index === params.length ? '' : encoded);
|
||||
}, '');
|
||||
const pathOnly = path.split(/[?#]/, 1)[0];
|
||||
const invalidSegmentPattern = /(?<=^|\/)(?:\.|%2e){1,2}(?=\/|$)/gi;
|
||||
let match;
|
||||
// Find all invalid segments
|
||||
while ((match = invalidSegmentPattern.exec(pathOnly)) !== null) {
|
||||
invalidSegments.push({
|
||||
start: match.index,
|
||||
length: match[0].length,
|
||||
error: `Value "${match[0]}" can\'t be safely passed as a path parameter`,
|
||||
});
|
||||
}
|
||||
invalidSegments.sort((a, b) => a.start - b.start);
|
||||
if (invalidSegments.length > 0) {
|
||||
let lastEnd = 0;
|
||||
const underline = invalidSegments.reduce((acc, segment) => {
|
||||
const spaces = ' '.repeat(segment.start - lastEnd);
|
||||
const arrows = '^'.repeat(segment.length);
|
||||
lastEnd = segment.start + segment.length;
|
||||
return acc + spaces + arrows;
|
||||
}, '');
|
||||
throw new AnthropicError(`Path parameters result in path with invalid segments:\n${invalidSegments
|
||||
.map((e) => e.error)
|
||||
.join('\n')}\n${path}\n${underline}`);
|
||||
}
|
||||
return path;
|
||||
};
|
||||
/**
|
||||
* URI-encodes path params and ensures no unsafe /./ or /../ path segments are introduced.
|
||||
*/
|
||||
export const path = /* @__PURE__ */ createPathTagFunction(encodeURIPath);
|
||||
//# sourceMappingURL=path.mjs.map
|
||||
94
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/utils/values.mjs
generated
vendored
94
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/utils/values.mjs
generated
vendored
@@ -1,94 +0,0 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { AnthropicError } from "../../core/error.mjs";
|
||||
// https://url.spec.whatwg.org/#url-scheme-string
|
||||
const startsWithSchemeRegexp = /^[a-z][a-z0-9+.-]*:/i;
|
||||
export const isAbsoluteURL = (url) => {
|
||||
return startsWithSchemeRegexp.test(url);
|
||||
};
|
||||
export let isArray = (val) => ((isArray = Array.isArray), isArray(val));
|
||||
export let isReadonlyArray = isArray;
|
||||
/** Returns an object if the given value isn't an object, otherwise returns as-is */
|
||||
export function maybeObj(x) {
|
||||
if (typeof x !== 'object') {
|
||||
return {};
|
||||
}
|
||||
return x ?? {};
|
||||
}
|
||||
// https://stackoverflow.com/a/34491287
|
||||
export function isEmptyObj(obj) {
|
||||
if (!obj)
|
||||
return true;
|
||||
for (const _k in obj)
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
// https://eslint.org/docs/latest/rules/no-prototype-builtins
|
||||
export function hasOwn(obj, key) {
|
||||
return Object.prototype.hasOwnProperty.call(obj, key);
|
||||
}
|
||||
export function isObj(obj) {
|
||||
return obj != null && typeof obj === 'object' && !Array.isArray(obj);
|
||||
}
|
||||
export const ensurePresent = (value) => {
|
||||
if (value == null) {
|
||||
throw new AnthropicError(`Expected a value to be given but received ${value} instead.`);
|
||||
}
|
||||
return value;
|
||||
};
|
||||
export const validatePositiveInteger = (name, n) => {
|
||||
if (typeof n !== 'number' || !Number.isInteger(n)) {
|
||||
throw new AnthropicError(`${name} must be an integer`);
|
||||
}
|
||||
if (n < 0) {
|
||||
throw new AnthropicError(`${name} must be a positive integer`);
|
||||
}
|
||||
return n;
|
||||
};
|
||||
export const coerceInteger = (value) => {
|
||||
if (typeof value === 'number')
|
||||
return Math.round(value);
|
||||
if (typeof value === 'string')
|
||||
return parseInt(value, 10);
|
||||
throw new AnthropicError(`Could not coerce ${value} (type: ${typeof value}) into a number`);
|
||||
};
|
||||
export const coerceFloat = (value) => {
|
||||
if (typeof value === 'number')
|
||||
return value;
|
||||
if (typeof value === 'string')
|
||||
return parseFloat(value);
|
||||
throw new AnthropicError(`Could not coerce ${value} (type: ${typeof value}) into a number`);
|
||||
};
|
||||
export const coerceBoolean = (value) => {
|
||||
if (typeof value === 'boolean')
|
||||
return value;
|
||||
if (typeof value === 'string')
|
||||
return value === 'true';
|
||||
return Boolean(value);
|
||||
};
|
||||
export const maybeCoerceInteger = (value) => {
|
||||
if (value == null) {
|
||||
return undefined;
|
||||
}
|
||||
return coerceInteger(value);
|
||||
};
|
||||
export const maybeCoerceFloat = (value) => {
|
||||
if (value == null) {
|
||||
return undefined;
|
||||
}
|
||||
return coerceFloat(value);
|
||||
};
|
||||
export const maybeCoerceBoolean = (value) => {
|
||||
if (value == null) {
|
||||
return undefined;
|
||||
}
|
||||
return coerceBoolean(value);
|
||||
};
|
||||
export const safeJSON = (text) => {
|
||||
try {
|
||||
return JSON.parse(text);
|
||||
}
|
||||
catch (err) {
|
||||
return undefined;
|
||||
}
|
||||
};
|
||||
//# sourceMappingURL=values.mjs.map
|
||||
98
extracted-source/node_modules/@anthropic-ai/foundry-sdk/client.mjs
generated
vendored
98
extracted-source/node_modules/@anthropic-ai/foundry-sdk/client.mjs
generated
vendored
@@ -1,98 +0,0 @@
|
||||
import { buildHeaders } from "./internal/headers.mjs";
|
||||
import * as Errors from "./core/error.mjs";
|
||||
import { readEnv } from "./internal/utils.mjs";
|
||||
import { Anthropic } from '@anthropic-ai/sdk/client';
|
||||
export { BaseAnthropic } from '@anthropic-ai/sdk/client';
|
||||
import * as Resources from '@anthropic-ai/sdk/resources/index';
|
||||
/** API Client for interfacing with the Anthropic Foundry API. */
|
||||
export class AnthropicFoundry extends Anthropic {
|
||||
/**
|
||||
* API Client for interfacing with the Anthropic Foundry API.
|
||||
*
|
||||
* @param {string | undefined} [opts.resource=process.env['ANTHROPIC_FOUNDRY_RESOURCE'] ?? undefined] - Your Foundry resource name
|
||||
* @param {string | undefined} [opts.apiKey=process.env['ANTHROPIC_FOUNDRY_API_KEY'] ?? undefined]
|
||||
* @param {string | null | undefined} [opts.organization=process.env['ANTHROPIC_ORG_ID'] ?? null]
|
||||
* @param {string} [opts.baseURL=process.env['ANTHROPIC_FOUNDRY_BASE_URL']] - Sets the base URL for the API, e.g. `https://example-resource.azure.anthropic.com/anthropic/`.
|
||||
* @param {number} [opts.timeout=10 minutes] - The maximum amount of time (in milliseconds) the client will wait for a response before timing out.
|
||||
* @param {number} [opts.httpAgent] - An HTTP agent used to manage HTTP(s) connections.
|
||||
* @param {Fetch} [opts.fetch] - Specify a custom `fetch` function implementation.
|
||||
* @param {number} [opts.maxRetries=2] - The maximum number of times the client will retry a request.
|
||||
* @param {Headers} opts.defaultHeaders - Default headers to include with every request to the API.
|
||||
* @param {DefaultQuery} opts.defaultQuery - Default query parameters to include with every request to the API.
|
||||
* @param {boolean} [opts.dangerouslyAllowBrowser=false] - By default, client-side use of this library is not allowed, as it risks exposing your secret API credentials to attackers.
|
||||
*/
|
||||
constructor({ baseURL = readEnv('ANTHROPIC_FOUNDRY_BASE_URL'), apiKey = readEnv('ANTHROPIC_FOUNDRY_API_KEY'), resource = readEnv('ANTHROPIC_FOUNDRY_RESOURCE'), azureADTokenProvider, dangerouslyAllowBrowser, ...opts } = {}) {
|
||||
if (typeof azureADTokenProvider === 'function') {
|
||||
dangerouslyAllowBrowser = true;
|
||||
}
|
||||
if (!azureADTokenProvider && !apiKey) {
|
||||
throw new Errors.AnthropicError('Missing credentials. Please pass one of `apiKey` and `azureTokenProvider`, or set the `ANTHROPIC_FOUNDRY_API_KEY` environment variable.');
|
||||
}
|
||||
if (azureADTokenProvider && apiKey) {
|
||||
throw new Errors.AnthropicError('The `apiKey` and `azureADTokenProvider` arguments are mutually exclusive; only one can be passed at a time.');
|
||||
}
|
||||
if (!baseURL) {
|
||||
if (!resource) {
|
||||
throw new Errors.AnthropicError('Must provide one of the `baseURL` or `resource` arguments, or the `ANTHROPIC_FOUNDRY_RESOURCE` environment variable');
|
||||
}
|
||||
baseURL = `https://${resource}.services.ai.azure.com/anthropic/`;
|
||||
}
|
||||
else {
|
||||
if (resource) {
|
||||
throw new Errors.AnthropicError('baseURL and resource are mutually exclusive');
|
||||
}
|
||||
}
|
||||
super({
|
||||
apiKey: azureADTokenProvider ?? apiKey,
|
||||
baseURL,
|
||||
...opts,
|
||||
...(dangerouslyAllowBrowser !== undefined ? { dangerouslyAllowBrowser } : {}),
|
||||
});
|
||||
this.resource = null;
|
||||
// @ts-expect-error are using a different Messages type that omits batches
|
||||
this.messages = makeMessagesResource(this);
|
||||
// @ts-expect-error are using a different Beta type that omits batches
|
||||
this.beta = makeBetaResource(this);
|
||||
// @ts-expect-error Anthropic Foundry does not support models endpoint
|
||||
this.models = undefined;
|
||||
}
|
||||
async authHeaders() {
|
||||
if (typeof this._options.apiKey === 'function') {
|
||||
let token;
|
||||
try {
|
||||
token = await this._options.apiKey();
|
||||
}
|
||||
catch (err) {
|
||||
if (err instanceof Errors.AnthropicError)
|
||||
throw err;
|
||||
throw new Errors.AnthropicError(`Failed to get token from azureADTokenProvider: ${err.message}`,
|
||||
// @ts-ignore
|
||||
{ cause: err });
|
||||
}
|
||||
if (typeof token !== 'string' || !token) {
|
||||
throw new Errors.AnthropicError(`Expected azureADTokenProvider function argument to return a string but it returned ${token}`);
|
||||
}
|
||||
return buildHeaders([{ Authorization: `Bearer ${token}` }]);
|
||||
}
|
||||
if (typeof this._options.apiKey === 'string') {
|
||||
return buildHeaders([{ 'x-api-key': this.apiKey }]);
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
validateHeaders() {
|
||||
return;
|
||||
}
|
||||
}
|
||||
function makeMessagesResource(client) {
|
||||
const resource = new Resources.Messages(client);
|
||||
// @ts-expect-error we're deleting non-optional properties
|
||||
delete resource.batches;
|
||||
return resource;
|
||||
}
|
||||
function makeBetaResource(client) {
|
||||
const resource = new Resources.Beta(client);
|
||||
// @ts-expect-error we're deleting non-optional properties
|
||||
delete resource.messages.batches;
|
||||
return resource;
|
||||
}
|
||||
//# sourceMappingURL=client.mjs.map
|
||||
2
extracted-source/node_modules/@anthropic-ai/foundry-sdk/core/error.mjs
generated
vendored
2
extracted-source/node_modules/@anthropic-ai/foundry-sdk/core/error.mjs
generated
vendored
@@ -1,2 +0,0 @@
|
||||
export * from '@anthropic-ai/sdk/core/error';
|
||||
//# sourceMappingURL=error.mjs.map
|
||||
3
extracted-source/node_modules/@anthropic-ai/foundry-sdk/index.mjs
generated
vendored
3
extracted-source/node_modules/@anthropic-ai/foundry-sdk/index.mjs
generated
vendored
@@ -1,3 +0,0 @@
|
||||
export * from "./client.mjs";
|
||||
export { AnthropicFoundry as default } from "./client.mjs";
|
||||
//# sourceMappingURL=index.mjs.map
|
||||
74
extracted-source/node_modules/@anthropic-ai/foundry-sdk/internal/headers.mjs
generated
vendored
74
extracted-source/node_modules/@anthropic-ai/foundry-sdk/internal/headers.mjs
generated
vendored
@@ -1,74 +0,0 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { isReadonlyArray } from "./utils/values.mjs";
|
||||
const brand_privateNullableHeaders = Symbol.for('brand.privateNullableHeaders');
|
||||
function* iterateHeaders(headers) {
|
||||
if (!headers)
|
||||
return;
|
||||
if (brand_privateNullableHeaders in headers) {
|
||||
const { values, nulls } = headers;
|
||||
yield* values.entries();
|
||||
for (const name of nulls) {
|
||||
yield [name, null];
|
||||
}
|
||||
return;
|
||||
}
|
||||
let shouldClear = false;
|
||||
let iter;
|
||||
if (headers instanceof Headers) {
|
||||
iter = headers.entries();
|
||||
}
|
||||
else if (isReadonlyArray(headers)) {
|
||||
iter = headers;
|
||||
}
|
||||
else {
|
||||
shouldClear = true;
|
||||
iter = Object.entries(headers ?? {});
|
||||
}
|
||||
for (let row of iter) {
|
||||
const name = row[0];
|
||||
if (typeof name !== 'string')
|
||||
throw new TypeError('expected header name to be a string');
|
||||
const values = isReadonlyArray(row[1]) ? row[1] : [row[1]];
|
||||
let didClear = false;
|
||||
for (const value of values) {
|
||||
if (value === undefined)
|
||||
continue;
|
||||
// Objects keys always overwrite older headers, they never append.
|
||||
// Yield a null to clear the header before adding the new values.
|
||||
if (shouldClear && !didClear) {
|
||||
didClear = true;
|
||||
yield [name, null];
|
||||
}
|
||||
yield [name, value];
|
||||
}
|
||||
}
|
||||
}
|
||||
export const buildHeaders = (newHeaders) => {
|
||||
const targetHeaders = new Headers();
|
||||
const nullHeaders = new Set();
|
||||
for (const headers of newHeaders) {
|
||||
const seenHeaders = new Set();
|
||||
for (const [name, value] of iterateHeaders(headers)) {
|
||||
const lowerName = name.toLowerCase();
|
||||
if (!seenHeaders.has(lowerName)) {
|
||||
targetHeaders.delete(name);
|
||||
seenHeaders.add(lowerName);
|
||||
}
|
||||
if (value === null) {
|
||||
targetHeaders.delete(name);
|
||||
nullHeaders.add(lowerName);
|
||||
}
|
||||
else {
|
||||
targetHeaders.append(name, value);
|
||||
nullHeaders.delete(lowerName);
|
||||
}
|
||||
}
|
||||
}
|
||||
return { [brand_privateNullableHeaders]: true, values: targetHeaders, nulls: nullHeaders };
|
||||
};
|
||||
export const isEmptyHeaders = (headers) => {
|
||||
for (const _ of iterateHeaders(headers))
|
||||
return false;
|
||||
return true;
|
||||
};
|
||||
//# sourceMappingURL=headers.mjs.map
|
||||
8
extracted-source/node_modules/@anthropic-ai/foundry-sdk/internal/utils.mjs
generated
vendored
8
extracted-source/node_modules/@anthropic-ai/foundry-sdk/internal/utils.mjs
generated
vendored
@@ -1,8 +0,0 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
export * from "./utils/values.mjs";
|
||||
export * from "./utils/base64.mjs";
|
||||
export * from "./utils/env.mjs";
|
||||
export * from "./utils/log.mjs";
|
||||
export * from "./utils/uuid.mjs";
|
||||
export * from "./utils/sleep.mjs";
|
||||
//# sourceMappingURL=utils.mjs.map
|
||||
33
extracted-source/node_modules/@anthropic-ai/foundry-sdk/internal/utils/base64.mjs
generated
vendored
33
extracted-source/node_modules/@anthropic-ai/foundry-sdk/internal/utils/base64.mjs
generated
vendored
@@ -1,33 +0,0 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { AnthropicError } from "../../core/error.mjs";
|
||||
import { encodeUTF8 } from "./bytes.mjs";
|
||||
export const toBase64 = (data) => {
|
||||
if (!data)
|
||||
return '';
|
||||
if (typeof globalThis.Buffer !== 'undefined') {
|
||||
return globalThis.Buffer.from(data).toString('base64');
|
||||
}
|
||||
if (typeof data === 'string') {
|
||||
data = encodeUTF8(data);
|
||||
}
|
||||
if (typeof btoa !== 'undefined') {
|
||||
return btoa(String.fromCharCode.apply(null, data));
|
||||
}
|
||||
throw new AnthropicError('Cannot generate base64 string; Expected `Buffer` or `btoa` to be defined');
|
||||
};
|
||||
export const fromBase64 = (str) => {
|
||||
if (typeof globalThis.Buffer !== 'undefined') {
|
||||
const buf = globalThis.Buffer.from(str, 'base64');
|
||||
return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength);
|
||||
}
|
||||
if (typeof atob !== 'undefined') {
|
||||
const bstr = atob(str);
|
||||
const buf = new Uint8Array(bstr.length);
|
||||
for (let i = 0; i < bstr.length; i++) {
|
||||
buf[i] = bstr.charCodeAt(i);
|
||||
}
|
||||
return buf;
|
||||
}
|
||||
throw new AnthropicError('Cannot decode base64 string; Expected `Buffer` or `atob` to be defined');
|
||||
};
|
||||
//# sourceMappingURL=base64.mjs.map
|
||||
18
extracted-source/node_modules/@anthropic-ai/foundry-sdk/internal/utils/env.mjs
generated
vendored
18
extracted-source/node_modules/@anthropic-ai/foundry-sdk/internal/utils/env.mjs
generated
vendored
@@ -1,18 +0,0 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
/**
|
||||
* Read an environment variable.
|
||||
*
|
||||
* Trims beginning and trailing whitespace.
|
||||
*
|
||||
* Will return undefined if the environment variable doesn't exist or cannot be accessed.
|
||||
*/
|
||||
export const readEnv = (env) => {
|
||||
if (typeof globalThis.process !== 'undefined') {
|
||||
return globalThis.process.env?.[env]?.trim() ?? undefined;
|
||||
}
|
||||
if (typeof globalThis.Deno !== 'undefined') {
|
||||
return globalThis.Deno.env?.get?.(env)?.trim();
|
||||
}
|
||||
return undefined;
|
||||
};
|
||||
//# sourceMappingURL=env.mjs.map
|
||||
80
extracted-source/node_modules/@anthropic-ai/foundry-sdk/internal/utils/log.mjs
generated
vendored
80
extracted-source/node_modules/@anthropic-ai/foundry-sdk/internal/utils/log.mjs
generated
vendored
@@ -1,80 +0,0 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { hasOwn } from "./values.mjs";
|
||||
const levelNumbers = {
|
||||
off: 0,
|
||||
error: 200,
|
||||
warn: 300,
|
||||
info: 400,
|
||||
debug: 500,
|
||||
};
|
||||
export const parseLogLevel = (maybeLevel, sourceName, client) => {
|
||||
if (!maybeLevel) {
|
||||
return undefined;
|
||||
}
|
||||
if (hasOwn(levelNumbers, maybeLevel)) {
|
||||
return maybeLevel;
|
||||
}
|
||||
loggerFor(client).warn(`${sourceName} was set to ${JSON.stringify(maybeLevel)}, expected one of ${JSON.stringify(Object.keys(levelNumbers))}`);
|
||||
return undefined;
|
||||
};
|
||||
function noop() { }
|
||||
function makeLogFn(fnLevel, logger, logLevel) {
|
||||
if (!logger || levelNumbers[fnLevel] > levelNumbers[logLevel]) {
|
||||
return noop;
|
||||
}
|
||||
else {
|
||||
// Don't wrap logger functions, we want the stacktrace intact!
|
||||
return logger[fnLevel].bind(logger);
|
||||
}
|
||||
}
|
||||
const noopLogger = {
|
||||
error: noop,
|
||||
warn: noop,
|
||||
info: noop,
|
||||
debug: noop,
|
||||
};
|
||||
let cachedLoggers = /* @__PURE__ */ new WeakMap();
|
||||
export function loggerFor(client) {
|
||||
const logger = client.logger;
|
||||
const logLevel = client.logLevel ?? 'off';
|
||||
if (!logger) {
|
||||
return noopLogger;
|
||||
}
|
||||
const cachedLogger = cachedLoggers.get(logger);
|
||||
if (cachedLogger && cachedLogger[0] === logLevel) {
|
||||
return cachedLogger[1];
|
||||
}
|
||||
const levelLogger = {
|
||||
error: makeLogFn('error', logger, logLevel),
|
||||
warn: makeLogFn('warn', logger, logLevel),
|
||||
info: makeLogFn('info', logger, logLevel),
|
||||
debug: makeLogFn('debug', logger, logLevel),
|
||||
};
|
||||
cachedLoggers.set(logger, [logLevel, levelLogger]);
|
||||
return levelLogger;
|
||||
}
|
||||
export const formatRequestDetails = (details) => {
|
||||
if (details.options) {
|
||||
details.options = { ...details.options };
|
||||
delete details.options['headers']; // redundant + leaks internals
|
||||
}
|
||||
if (details.headers) {
|
||||
details.headers = Object.fromEntries((details.headers instanceof Headers ? [...details.headers] : Object.entries(details.headers)).map(([name, value]) => [
|
||||
name,
|
||||
(name.toLowerCase() === 'x-api-key' ||
|
||||
name.toLowerCase() === 'authorization' ||
|
||||
name.toLowerCase() === 'cookie' ||
|
||||
name.toLowerCase() === 'set-cookie') ?
|
||||
'***'
|
||||
: value,
|
||||
]));
|
||||
}
|
||||
if ('retryOfRequestLogID' in details) {
|
||||
if (details.retryOfRequestLogID) {
|
||||
details.retryOf = details.retryOfRequestLogID;
|
||||
}
|
||||
delete details.retryOfRequestLogID;
|
||||
}
|
||||
return details;
|
||||
};
|
||||
//# sourceMappingURL=log.mjs.map
|
||||
100
extracted-source/node_modules/@anthropic-ai/foundry-sdk/internal/utils/values.mjs
generated
vendored
100
extracted-source/node_modules/@anthropic-ai/foundry-sdk/internal/utils/values.mjs
generated
vendored
@@ -1,100 +0,0 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { AnthropicError } from "../../core/error.mjs";
|
||||
// https://url.spec.whatwg.org/#url-scheme-string
|
||||
const startsWithSchemeRegexp = /^[a-z][a-z0-9+.-]*:/i;
|
||||
export const isAbsoluteURL = (url) => {
|
||||
return startsWithSchemeRegexp.test(url);
|
||||
};
|
||||
export let isArray = (val) => ((isArray = Array.isArray), isArray(val));
|
||||
export let isReadonlyArray = isArray;
|
||||
/** Returns an object if the given value isn't an object, otherwise returns as-is */
|
||||
export function maybeObj(x) {
|
||||
if (typeof x !== 'object') {
|
||||
return {};
|
||||
}
|
||||
return x ?? {};
|
||||
}
|
||||
// https://stackoverflow.com/a/34491287
|
||||
export function isEmptyObj(obj) {
|
||||
if (!obj)
|
||||
return true;
|
||||
for (const _k in obj)
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
// https://eslint.org/docs/latest/rules/no-prototype-builtins
|
||||
export function hasOwn(obj, key) {
|
||||
return Object.prototype.hasOwnProperty.call(obj, key);
|
||||
}
|
||||
export function isObj(obj) {
|
||||
return obj != null && typeof obj === 'object' && !Array.isArray(obj);
|
||||
}
|
||||
export const ensurePresent = (value) => {
|
||||
if (value == null) {
|
||||
throw new AnthropicError(`Expected a value to be given but received ${value} instead.`);
|
||||
}
|
||||
return value;
|
||||
};
|
||||
export const validatePositiveInteger = (name, n) => {
|
||||
if (typeof n !== 'number' || !Number.isInteger(n)) {
|
||||
throw new AnthropicError(`${name} must be an integer`);
|
||||
}
|
||||
if (n < 0) {
|
||||
throw new AnthropicError(`${name} must be a positive integer`);
|
||||
}
|
||||
return n;
|
||||
};
|
||||
export const coerceInteger = (value) => {
|
||||
if (typeof value === 'number')
|
||||
return Math.round(value);
|
||||
if (typeof value === 'string')
|
||||
return parseInt(value, 10);
|
||||
throw new AnthropicError(`Could not coerce ${value} (type: ${typeof value}) into a number`);
|
||||
};
|
||||
export const coerceFloat = (value) => {
|
||||
if (typeof value === 'number')
|
||||
return value;
|
||||
if (typeof value === 'string')
|
||||
return parseFloat(value);
|
||||
throw new AnthropicError(`Could not coerce ${value} (type: ${typeof value}) into a number`);
|
||||
};
|
||||
export const coerceBoolean = (value) => {
|
||||
if (typeof value === 'boolean')
|
||||
return value;
|
||||
if (typeof value === 'string')
|
||||
return value === 'true';
|
||||
return Boolean(value);
|
||||
};
|
||||
export const maybeCoerceInteger = (value) => {
|
||||
if (value == null) {
|
||||
return undefined;
|
||||
}
|
||||
return coerceInteger(value);
|
||||
};
|
||||
export const maybeCoerceFloat = (value) => {
|
||||
if (value == null) {
|
||||
return undefined;
|
||||
}
|
||||
return coerceFloat(value);
|
||||
};
|
||||
export const maybeCoerceBoolean = (value) => {
|
||||
if (value == null) {
|
||||
return undefined;
|
||||
}
|
||||
return coerceBoolean(value);
|
||||
};
|
||||
export const safeJSON = (text) => {
|
||||
try {
|
||||
return JSON.parse(text);
|
||||
}
|
||||
catch (err) {
|
||||
return undefined;
|
||||
}
|
||||
};
|
||||
/**
 * Remove `key` from `obj` and return its former value
 * (`undefined` when the key was not present). Mutates `obj`.
 */
export const pop = (obj, key) => {
    const { [key]: value } = obj;
    delete obj[key];
    return value;
};
|
||||
//# sourceMappingURL=values.mjs.map
|
||||
705
extracted-source/node_modules/@anthropic-ai/mcpb/dist/cli/init.js
generated
vendored
705
extracted-source/node_modules/@anthropic-ai/mcpb/dist/cli/init.js
generated
vendored
@@ -1,705 +0,0 @@
|
||||
import { confirm, input, select } from "@inquirer/prompts";
|
||||
import { existsSync, readFileSync, writeFileSync } from "fs";
|
||||
import { basename, join, resolve } from "path";
|
||||
import { CURRENT_MANIFEST_VERSION } from "../schemas.js";
|
||||
/**
 * Read and parse `package.json` from the given directory.
 * Returns an empty object when the file is missing or contains invalid
 * JSON — callers always get a usable (possibly empty) object.
 */
export function readPackageJson(dirPath) {
    const packageJsonPath = join(dirPath, "package.json");
    if (!existsSync(packageJsonPath)) {
        return {};
    }
    try {
        return JSON.parse(readFileSync(packageJsonPath, "utf-8"));
    }
    catch {
        // Invalid JSON is treated the same as a missing file.
        return {};
    }
}
|
||||
/** Extract an author name from package.json data ("" when absent). */
export function getDefaultAuthorName(packageData) {
    const { author } = packageData;
    return typeof author === "string" ? author : author?.name || "";
}
|
||||
/** Extract an author email from object-form package.json author data ("" otherwise). */
export function getDefaultAuthorEmail(packageData) {
    const { author } = packageData;
    if (typeof author !== "object") {
        return "";
    }
    return author?.email || "";
}
|
||||
/** Extract an author URL from object-form package.json author data ("" otherwise). */
export function getDefaultAuthorUrl(packageData) {
    const { author } = packageData;
    if (typeof author !== "object") {
        return "";
    }
    return author?.url || "";
}
|
||||
/** Extract a repository URL from package.json data ("" when absent). */
export function getDefaultRepositoryUrl(packageData) {
    const { repository } = packageData;
    return typeof repository === "string" ? repository : repository?.url || "";
}
|
||||
/** Non-interactive defaults for the manifest's basic fields. */
export function getDefaultBasicInfo(packageData, resolvedPath) {
    const name = packageData.name || basename(resolvedPath);
    return {
        name,
        authorName: getDefaultAuthorName(packageData) || "Unknown Author",
        displayName: name,
        version: packageData.version || "1.0.0",
        description: packageData.description || "A MCPB bundle",
    };
}
|
||||
/** Non-interactive defaults for the optional author contact fields. */
export function getDefaultAuthorInfo(packageData) {
    const authorEmail = getDefaultAuthorEmail(packageData);
    const authorUrl = getDefaultAuthorUrl(packageData);
    return { authorEmail, authorUrl };
}
|
||||
/** Non-interactive default server configuration (a Node.js server). */
export function getDefaultServerConfig(packageData) {
    const serverType = "node";
    const entryPoint = getDefaultEntryPoint(serverType, packageData);
    return {
        serverType,
        entryPoint,
        mcp_config: createMcpConfig(serverType, entryPoint),
    };
}
|
||||
/** Non-interactive defaults for keywords / license / repository. */
export function getDefaultOptionalFields(packageData) {
    const license = packageData.license || "MIT";
    return { keywords: "", license, repository: undefined };
}
|
||||
/**
 * Build the mcp_config stanza for a given server type.
 * NOTE: "${__dirname}/" is a plain string, not a template literal — the
 * placeholder is emitted verbatim into the manifest.
 */
export function createMcpConfig(serverType, entryPoint) {
    const bundledPath = "${__dirname}/" + entryPoint;
    switch (serverType) {
        case "node":
            return { command: "node", args: [bundledPath], env: {} };
        case "python":
            return {
                command: "python",
                args: [bundledPath],
                env: { PYTHONPATH: "${__dirname}/server/lib" },
            };
        case "binary":
            // Binaries are invoked directly, so the path is the command itself.
            return { command: bundledPath, args: [], env: {} };
    }
}
|
||||
/** Default entry-point path for each server type (node honors package.json `main`). */
export function getDefaultEntryPoint(serverType, packageData) {
    if (serverType === "node") {
        return packageData?.main || "server/index.js";
    }
    if (serverType === "python") {
        return "server/main.py";
    }
    if (serverType === "binary") {
        return "server/my-server";
    }
}
|
||||
// Interactively collect the manifest's required basic fields, pre-filling
// defaults from package.json data and the target directory's name.
export async function promptBasicInfo(packageData, resolvedPath) {
    const defaultName = packageData.name || basename(resolvedPath);
    const name = await input({
        message: "Extension name:",
        default: defaultName,
        validate: (value) => value.trim().length > 0 || "Name is required",
    });
    const authorName = await input({
        message: "Author name:",
        default: getDefaultAuthorName(packageData),
        validate: (value) => value.trim().length > 0 || "Author name is required",
    });
    const displayName = await input({
        message: "Display name (optional):",
        default: name,
    });
    const version = await input({
        message: "Version:",
        default: packageData.version || "1.0.0",
        validate: (value) => {
            if (!value.trim())
                return "Version is required";
            // Loose semver check: only a MAJOR.MINOR.PATCH prefix is required.
            if (!/^\d+\.\d+\.\d+/.test(value)) {
                return "Version must follow semantic versioning (e.g., 1.0.0)";
            }
            return true;
        },
    });
    const description = await input({
        message: "Description:",
        default: packageData.description || "",
        validate: (value) => value.trim().length > 0 || "Description is required",
    });
    return { name, authorName, displayName, version, description };
}
|
||||
// Interactively collect the optional author contact fields; blank answers
// are allowed and returned as empty strings.
export async function promptAuthorInfo(packageData) {
    const authorEmail = await input({
        message: "Author email (optional):",
        default: getDefaultAuthorEmail(packageData),
    });
    const authorUrl = await input({
        message: "Author URL (optional):",
        default: getDefaultAuthorUrl(packageData),
    });
    return { authorEmail, authorUrl };
}
|
||||
// Interactively choose the server type and entry point, then derive the
// matching mcp_config stanza from those answers.
export async function promptServerConfig(packageData) {
    const serverType = (await select({
        message: "Server type:",
        choices: [
            { name: "Node.js", value: "node" },
            { name: "Python", value: "python" },
            { name: "Binary", value: "binary" },
        ],
        default: "node",
    }));
    const entryPoint = await input({
        message: "Entry point:",
        default: getDefaultEntryPoint(serverType, packageData),
    });
    const mcp_config = createMcpConfig(serverType, entryPoint);
    return { serverType, entryPoint, mcp_config };
}
|
||||
// Interactively collect the list of tools to advertise in the manifest,
// plus a flag for whether the server also generates tools at runtime.
export async function promptTools() {
    const addTools = await confirm({
        message: "Does your MCP Server provide tools you want to advertise (optional)?",
        default: true,
    });
    const tools = [];
    let toolsGenerated = false;
    if (addTools) {
        let addMore = true;
        // Collect tools one at a time until the user declines to add more.
        while (addMore) {
            const toolName = await input({
                message: "Tool name:",
                validate: (value) => value.trim().length > 0 || "Tool name is required",
            });
            const toolDescription = await input({
                message: "Tool description (optional):",
            });
            // Only include the description key when one was given.
            tools.push({
                name: toolName,
                ...(toolDescription ? { description: toolDescription } : {}),
            });
            addMore = await confirm({
                message: "Add another tool?",
                default: false,
            });
        }
        // Ask about generated tools
        toolsGenerated = await confirm({
            message: "Does your server generate additional tools at runtime?",
            default: false,
        });
    }
    return { tools, toolsGenerated };
}
|
||||
// Interactively collect the list of prompts to advertise in the manifest,
// including optional per-prompt arguments and the prompt text template.
export async function promptPrompts() {
    const addPrompts = await confirm({
        message: "Does your MCP Server provide prompts you want to advertise (optional)?",
        default: false,
    });
    const prompts = [];
    let promptsGenerated = false;
    if (addPrompts) {
        let addMore = true;
        // Outer loop: one iteration per prompt.
        while (addMore) {
            const promptName = await input({
                message: "Prompt name:",
                validate: (value) => value.trim().length > 0 || "Prompt name is required",
            });
            const promptDescription = await input({
                message: "Prompt description (optional):",
            });
            // Ask about arguments
            const hasArguments = await confirm({
                message: "Does this prompt have arguments?",
                default: false,
            });
            const argumentNames = [];
            if (hasArguments) {
                let addMoreArgs = true;
                // Inner loop: one iteration per argument name.
                while (addMoreArgs) {
                    const argName = await input({
                        message: "Argument name:",
                        validate: (value) => {
                            if (!value.trim())
                                return "Argument name is required";
                            // Argument names key into the text template, so
                            // duplicates would be ambiguous.
                            if (argumentNames.includes(value)) {
                                return "Argument names must be unique";
                            }
                            return true;
                        },
                    });
                    argumentNames.push(argName);
                    addMoreArgs = await confirm({
                        message: "Add another argument?",
                        default: false,
                    });
                }
            }
            // Prompt for the text template
            const promptText = await input({
                message: hasArguments
                    ? `Prompt text (use \${arguments.name} for arguments: ${argumentNames.join(", ")}):`
                    : "Prompt text:",
                validate: (value) => value.trim().length > 0 || "Prompt text is required",
            });
            // Optional keys are only emitted when they carry data.
            prompts.push({
                name: promptName,
                ...(promptDescription ? { description: promptDescription } : {}),
                ...(argumentNames.length > 0 ? { arguments: argumentNames } : {}),
                text: promptText,
            });
            addMore = await confirm({
                message: "Add another prompt?",
                default: false,
            });
        }
        // Ask about generated prompts
        promptsGenerated = await confirm({
            message: "Does your server generate additional prompts at runtime?",
            default: false,
        });
    }
    return { prompts, promptsGenerated };
}
|
||||
// Interactively collect keywords, license, and optional repository info.
// `repository` stays undefined unless the user opts in AND supplies a URL.
export async function promptOptionalFields(packageData) {
    const keywords = await input({
        message: "Keywords (comma-separated, optional):",
        default: "",
    });
    const license = await input({
        message: "License:",
        default: packageData.license || "MIT",
    });
    const addRepository = await confirm({
        message: "Add repository information?",
        default: !!packageData.repository,
    });
    let repository;
    if (addRepository) {
        const repoUrl = await input({
            message: "Repository URL:",
            default: getDefaultRepositoryUrl(packageData),
        });
        if (repoUrl) {
            // Repository type is always recorded as git.
            repository = {
                type: "git",
                url: repoUrl,
            };
        }
    }
    return { keywords, license, repository };
}
|
||||
// Optionally collect a long-form (markdown) description, defaulting to the
// short description. Returns undefined when the user declines.
export async function promptLongDescription(description) {
    const hasLongDescription = await confirm({
        message: "Add a detailed long description?",
        default: false,
    });
    if (hasLongDescription) {
        const longDescription = await input({
            message: "Long description (supports basic markdown):",
            default: description,
        });
        return longDescription;
    }
    return undefined;
}
|
||||
/**
 * Interactively collect the optional homepage / documentation / support
 * URLs. Blank answers are accepted (the fields are optional); non-blank
 * answers must parse with the WHATWG URL constructor.
 *
 * @returns {Promise<{homepage: string, documentation: string, support: string}>}
 */
export async function promptUrls() {
    // One shared validator instead of three identical inline copies; the
    // per-field error message is the only thing that varied.
    const optionalUrlValidator = (errorMessage) => (value) => {
        if (!value.trim())
            return true;
        try {
            new URL(value);
            return true;
        }
        catch {
            return errorMessage;
        }
    };
    const homepage = await input({
        message: "Homepage URL (optional):",
        validate: optionalUrlValidator("Must be a valid URL (e.g., https://example.com)"),
    });
    const documentation = await input({
        message: "Documentation URL (optional):",
        validate: optionalUrlValidator("Must be a valid URL"),
    });
    const support = await input({
        message: "Support URL (optional):",
        validate: optionalUrlValidator("Must be a valid URL"),
    });
    return { homepage, documentation, support };
}
|
||||
// Interactively collect the icon path and any screenshot paths. Paths are
// relative to the manifest and must not traverse upward ("..").
export async function promptVisualAssets() {
    const icon = await input({
        message: "Icon file path (optional, relative to manifest):",
        validate: (value) => {
            // Blank is allowed — the icon is optional.
            if (!value.trim())
                return true;
            if (value.includes(".."))
                return "Relative paths cannot include '..'";
            return true;
        },
    });
    const addScreenshots = await confirm({
        message: "Add screenshots?",
        default: false,
    });
    const screenshots = [];
    if (addScreenshots) {
        let addMore = true;
        // Collect screenshot paths until the user declines to add more.
        while (addMore) {
            const screenshot = await input({
                message: "Screenshot file path (relative to manifest):",
                validate: (value) => {
                    if (!value.trim())
                        return "Screenshot path is required";
                    if (value.includes(".."))
                        return "Relative paths cannot include '..'";
                    return true;
                },
            });
            screenshots.push(screenshot);
            addMore = await confirm({
                message: "Add another screenshot?",
                default: false,
            });
        }
    }
    return { icon, screenshots };
}
|
||||
// Interactively collect optional platform and runtime-version constraints.
// Returns undefined when the user declines constraints entirely; otherwise
// an object with only the sections that were actually filled in.
export async function promptCompatibility(serverType) {
    const addCompatibility = await confirm({
        message: "Add compatibility constraints?",
        default: false,
    });
    if (!addCompatibility) {
        return undefined;
    }
    const addPlatforms = await confirm({
        message: "Specify supported platforms?",
        default: false,
    });
    let platforms;
    if (addPlatforms) {
        const selectedPlatforms = [];
        const supportsDarwin = await confirm({
            message: "Support macOS (darwin)?",
            default: true,
        });
        if (supportsDarwin)
            selectedPlatforms.push("darwin");
        const supportsWin32 = await confirm({
            message: "Support Windows (win32)?",
            default: true,
        });
        if (supportsWin32)
            selectedPlatforms.push("win32");
        const supportsLinux = await confirm({
            message: "Support Linux?",
            default: true,
        });
        if (supportsLinux)
            selectedPlatforms.push("linux");
        // Selecting no platforms is treated as "no platform constraint".
        platforms = selectedPlatforms.length > 0 ? selectedPlatforms : undefined;
    }
    let runtimes;
    // Runtime version constraints are only offered for node/python servers.
    if (serverType !== "binary") {
        const addRuntimes = await confirm({
            message: "Specify runtime version constraints?",
            default: false,
        });
        if (addRuntimes) {
            if (serverType === "python") {
                const pythonVersion = await input({
                    message: "Python version constraint (e.g., >=3.8,<4.0):",
                    validate: (value) => value.trim().length > 0 || "Python version constraint is required",
                });
                runtimes = { python: pythonVersion };
            }
            else if (serverType === "node") {
                const nodeVersion = await input({
                    message: "Node.js version constraint (e.g., >=16.0.0):",
                    validate: (value) => value.trim().length > 0 || "Node.js version constraint is required",
                });
                runtimes = { node: nodeVersion };
            }
        }
    }
    return {
        ...(platforms ? { platforms } : {}),
        ...(runtimes ? { runtimes } : {}),
    };
}
|
||||
// Interactively build the user_config section: a map of option key ->
// option descriptor (type/title/description/required/sensitive, plus an
// optional default and, for numbers, optional min/max constraints).
export async function promptUserConfig() {
    const addUserConfig = await confirm({
        message: "Add user-configurable options?",
        default: false,
    });
    if (!addUserConfig) {
        return {};
    }
    const userConfig = {};
    let addMore = true;
    // One iteration per configuration option.
    while (addMore) {
        const optionKey = await input({
            message: "Configuration option key (unique identifier):",
            validate: (value) => {
                if (!value.trim())
                    return "Key is required";
                // Keys index into userConfig, so duplicates would overwrite.
                if (userConfig[value])
                    return "Key must be unique";
                return true;
            },
        });
        const optionType = (await select({
            message: "Option type:",
            choices: [
                { name: "String", value: "string" },
                { name: "Number", value: "number" },
                { name: "Boolean", value: "boolean" },
                { name: "Directory", value: "directory" },
                { name: "File", value: "file" },
            ],
        }));
        const optionTitle = await input({
            message: "Option title (human-readable name):",
            validate: (value) => value.trim().length > 0 || "Title is required",
        });
        const optionDescription = await input({
            message: "Option description:",
            validate: (value) => value.trim().length > 0 || "Description is required",
        });
        const optionRequired = await confirm({
            message: "Is this option required?",
            default: false,
        });
        const optionSensitive = await confirm({
            message: "Is this option sensitive (like a password)?",
            default: false,
        });
        // Build the option object
        const option = {
            type: optionType,
            title: optionTitle,
            description: optionDescription,
            required: optionRequired,
            sensitive: optionSensitive,
        };
        // Add default value if not required
        if (!optionRequired) {
            let defaultValue;
            if (optionType === "boolean") {
                defaultValue = await confirm({
                    message: "Default value:",
                    default: false,
                });
            }
            else if (optionType === "number") {
                const defaultStr = await input({
                    message: "Default value (number):",
                    validate: (value) => {
                        // Blank means "no default".
                        if (!value.trim())
                            return true;
                        return !isNaN(Number(value)) || "Must be a valid number";
                    },
                });
                defaultValue = defaultStr ? Number(defaultStr) : undefined;
            }
            else {
                // string/directory/file defaults are free-form text.
                defaultValue = await input({
                    message: "Default value (optional):",
                });
            }
            // Skip empty defaults so the manifest stays clean.
            if (defaultValue !== undefined && defaultValue !== "") {
                option.default = defaultValue;
            }
        }
        // Add constraints for number types
        if (optionType === "number") {
            const addConstraints = await confirm({
                message: "Add min/max constraints?",
                default: false,
            });
            if (addConstraints) {
                const min = await input({
                    message: "Minimum value (optional):",
                    validate: (value) => {
                        if (!value.trim())
                            return true;
                        return !isNaN(Number(value)) || "Must be a valid number";
                    },
                });
                const max = await input({
                    message: "Maximum value (optional):",
                    validate: (value) => {
                        if (!value.trim())
                            return true;
                        return !isNaN(Number(value)) || "Must be a valid number";
                    },
                });
                if (min)
                    option.min = Number(min);
                if (max)
                    option.max = Number(max);
            }
        }
        userConfig[optionKey] = option;
        addMore = await confirm({
            message: "Add another configuration option?",
            default: false,
        });
    }
    return userConfig;
}
|
||||
// Assemble the final manifest object from all collected sections. Optional
// sections are spread in conditionally, so absent values produce no key at
// all (rather than an empty or undefined field) in the written JSON.
export function buildManifest(basicInfo, longDescription, authorInfo, urls, visualAssets, serverConfig, tools, toolsGenerated, prompts, promptsGenerated, compatibility, userConfig, optionalFields) {
    const { name, displayName, version, description, authorName } = basicInfo;
    const { authorEmail, authorUrl } = authorInfo;
    const { serverType, entryPoint, mcp_config } = serverConfig;
    const { keywords, license, repository } = optionalFields;
    return {
        manifest_version: CURRENT_MANIFEST_VERSION,
        name,
        // display_name is only emitted when it differs from name.
        ...(displayName && displayName !== name
            ? { display_name: displayName }
            : {}),
        version,
        description,
        ...(longDescription ? { long_description: longDescription } : {}),
        author: {
            name: authorName,
            ...(authorEmail ? { email: authorEmail } : {}),
            ...(authorUrl ? { url: authorUrl } : {}),
        },
        ...(urls.homepage ? { homepage: urls.homepage } : {}),
        ...(urls.documentation ? { documentation: urls.documentation } : {}),
        ...(urls.support ? { support: urls.support } : {}),
        ...(visualAssets.icon ? { icon: visualAssets.icon } : {}),
        ...(visualAssets.screenshots.length > 0
            ? { screenshots: visualAssets.screenshots }
            : {}),
        server: {
            type: serverType,
            entry_point: entryPoint,
            mcp_config,
        },
        ...(tools.length > 0 ? { tools } : {}),
        ...(toolsGenerated ? { tools_generated: true } : {}),
        ...(prompts.length > 0 ? { prompts } : {}),
        ...(promptsGenerated ? { prompts_generated: true } : {}),
        ...(compatibility ? { compatibility } : {}),
        ...(Object.keys(userConfig).length > 0 ? { user_config: userConfig } : {}),
        // keywords arrive as a comma-separated string; split, trim,
        // and drop blank entries.
        ...(keywords
            ? {
                keywords: keywords
                    .split(",")
                    .map((k) => k.trim())
                    .filter((k) => k),
            }
            : {}),
        ...(license ? { license } : {}),
        ...(repository ? { repository } : {}),
    };
}
|
||||
/** Print follow-up instructions after a manifest has been created. */
export function printNextSteps() {
    const steps = [
        "\nNext steps:",
        "1. Ensure all your production dependencies are in this directory",
        "2. Run 'mcpb pack' to create your .mcpb file",
    ];
    for (const line of steps) {
        console.log(line);
    }
}
|
||||
// Create a manifest.json in targetPath, either interactively or (when
// nonInteractive) purely from package.json-derived defaults. Returns true
// on success, false when the user cancels or keeps an existing manifest.
export async function initExtension(targetPath = process.cwd(), nonInteractive = false) {
    const resolvedPath = resolve(targetPath);
    const manifestPath = join(resolvedPath, "manifest.json");
    if (existsSync(manifestPath)) {
        // Never silently overwrite: non-interactive runs bail out, and
        // interactive runs require explicit confirmation.
        if (nonInteractive) {
            console.log("manifest.json already exists. Use --force to overwrite in non-interactive mode.");
            return false;
        }
        const overwrite = await confirm({
            message: "manifest.json already exists. Overwrite?",
            default: false,
        });
        if (!overwrite) {
            console.log("Cancelled");
            return false;
        }
    }
    if (!nonInteractive) {
        console.log("This utility will help you create a manifest.json file for your MCPB bundle.");
        console.log("Press ^C at any time to quit.\n");
    }
    else {
        console.log("Creating manifest.json with default values...");
    }
    try {
        const packageData = readPackageJson(resolvedPath);
        // Prompt for all information or use defaults
        const basicInfo = nonInteractive
            ? getDefaultBasicInfo(packageData, resolvedPath)
            : await promptBasicInfo(packageData, resolvedPath);
        const longDescription = nonInteractive
            ? undefined
            : await promptLongDescription(basicInfo.description);
        const authorInfo = nonInteractive
            ? getDefaultAuthorInfo(packageData)
            : await promptAuthorInfo(packageData);
        const urls = nonInteractive
            ? { homepage: "", documentation: "", support: "" }
            : await promptUrls();
        const visualAssets = nonInteractive
            ? { icon: "", screenshots: [] }
            : await promptVisualAssets();
        const serverConfig = nonInteractive
            ? getDefaultServerConfig(packageData)
            : await promptServerConfig(packageData);
        const toolsData = nonInteractive
            ? { tools: [], toolsGenerated: false }
            : await promptTools();
        const promptsData = nonInteractive
            ? { prompts: [], promptsGenerated: false }
            : await promptPrompts();
        const compatibility = nonInteractive
            ? undefined
            : await promptCompatibility(serverConfig.serverType);
        const userConfig = nonInteractive ? {} : await promptUserConfig();
        const optionalFields = nonInteractive
            ? getDefaultOptionalFields(packageData)
            : await promptOptionalFields(packageData);
        // Build manifest
        const manifest = buildManifest(basicInfo, longDescription, authorInfo, urls, visualAssets, serverConfig, toolsData.tools, toolsData.toolsGenerated, promptsData.prompts, promptsData.promptsGenerated, compatibility, userConfig, optionalFields);
        // Write manifest
        writeFileSync(manifestPath, JSON.stringify(manifest, null, 2) + "\n");
        console.log(`\nCreated manifest.json at ${manifestPath}`);
        printNextSteps();
        return true;
    }
    catch (error) {
        // Ctrl-C inside an @inquirer prompt surfaces as a "User force closed"
        // error; treat that as a clean cancellation rather than a crash.
        if (error instanceof Error && error.message.includes("User force closed")) {
            console.log("\nCancelled");
            return false;
        }
        throw error;
    }
}
|
||||
200
extracted-source/node_modules/@anthropic-ai/mcpb/dist/cli/pack.js
generated
vendored
200
extracted-source/node_modules/@anthropic-ai/mcpb/dist/cli/pack.js
generated
vendored
@@ -1,200 +0,0 @@
|
||||
import { confirm } from "@inquirer/prompts";
|
||||
import { createHash } from "crypto";
|
||||
import { zipSync } from "fflate";
|
||||
import { existsSync, mkdirSync, readFileSync, statSync, writeFileSync, } from "fs";
|
||||
import { basename, join, relative, resolve, sep } from "path";
|
||||
import { getAllFilesWithCount, readMcpbIgnorePatterns } from "../node/files.js";
|
||||
import { validateManifest } from "../node/validate.js";
|
||||
import { CURRENT_MANIFEST_VERSION, McpbManifestSchema } from "../schemas.js";
|
||||
import { getLogger } from "../shared/log.js";
|
||||
import { initExtension } from "./init.js";
|
||||
// Human-readable file size: plain bytes below 1 KiB, otherwise kB or MB
// with one decimal place.
function formatFileSize(bytes) {
    const KB = 1024;
    const MB = KB * 1024;
    if (bytes < KB) {
        return `${bytes}B`;
    }
    if (bytes < MB) {
        return `${(bytes / KB).toFixed(1)}kB`;
    }
    return `${(bytes / MB).toFixed(1)}MB`;
}
|
||||
// Turn an arbitrary extension name into a safe, lowercase filename stem.
function sanitizeNameForFilename(name) {
    let sanitized = name.toLowerCase();
    sanitized = sanitized.replace(/\s+/g, "-"); // whitespace runs -> hyphen
    sanitized = sanitized.replace(/[^a-z0-9-_.]/g, ""); // drop unsafe characters
    sanitized = sanitized.replace(/-+/g, "-"); // collapse hyphen runs
    sanitized = sanitized.replace(/^-+|-+$/g, ""); // trim edge hyphens
    return sanitized.substring(0, 100); // cap length at 100 characters
}
|
||||
// Validate, list, and zip an extension directory into a .mcpb archive.
// Offers to create a manifest when one is missing, validates it, prints an
// npm-pack-style summary, and writes the zip. Returns true on success.
export async function packExtension({ extensionPath, outputPath, silent, }) {
    const resolvedPath = resolve(extensionPath);
    const logger = getLogger({ silent });
    // Check if directory exists
    if (!existsSync(resolvedPath) || !statSync(resolvedPath).isDirectory()) {
        logger.error(`ERROR: Directory not found: ${extensionPath}`);
        return false;
    }
    // Check if manifest exists
    const manifestPath = join(resolvedPath, "manifest.json");
    if (!existsSync(manifestPath)) {
        logger.log(`No manifest.json found in ${extensionPath}`);
        const shouldInit = await confirm({
            message: "Would you like to create a manifest.json file?",
            default: true,
        });
        if (shouldInit) {
            const success = await initExtension(extensionPath);
            if (!success) {
                logger.error("ERROR: Failed to create manifest");
                return false;
            }
        }
        else {
            logger.error("ERROR: Cannot pack extension without manifest.json");
            return false;
        }
    }
    // Validate manifest first
    logger.log("Validating manifest...");
    if (!validateManifest(manifestPath)) {
        logger.error("ERROR: Cannot pack extension with invalid manifest");
        return false;
    }
    // Read and parse manifest
    let manifest;
    try {
        const manifestContent = readFileSync(manifestPath, "utf-8");
        const manifestData = JSON.parse(manifestContent);
        manifest = McpbManifestSchema.parse(manifestData);
    }
    catch (error) {
        logger.error("ERROR: Failed to parse manifest.json");
        if (error instanceof Error) {
            logger.error(`  ${error.message}`);
        }
        return false;
    }
    // Accept the legacy dxt_version field as a fallback for manifest_version.
    const manifestVersion = manifest.manifest_version || manifest.dxt_version;
    if (manifestVersion !== CURRENT_MANIFEST_VERSION) {
        logger.error(`ERROR: Manifest version mismatch. Expected "${CURRENT_MANIFEST_VERSION}", found "${manifestVersion}"`);
        logger.error(`  Please update the manifest_version in your manifest.json to "${CURRENT_MANIFEST_VERSION}"`);
        return false;
    }
    // Determine output path
    const extensionName = basename(resolvedPath);
    const finalOutputPath = outputPath
        ? resolve(outputPath)
        : resolve(`${extensionName}.mcpb`);
    // Ensure output directory exists
    const outputDir = join(finalOutputPath, "..");
    mkdirSync(outputDir, { recursive: true });
    try {
        // Read .mcpbignore patterns if present
        const mcpbIgnorePatterns = readMcpbIgnorePatterns(resolvedPath);
        // Get all files in the extension directory
        const { files, ignoredCount } = getAllFilesWithCount(resolvedPath, resolvedPath, {}, mcpbIgnorePatterns);
        // Print package header
        logger.log(`\n📦 ${manifest.name}@${manifest.version}`);
        // Print file list
        logger.log("Archive Contents");
        const fileEntries = Object.entries(files);
        let totalUnpackedSize = 0;
        // Sort files for consistent output
        fileEntries.sort(([a], [b]) => a.localeCompare(b));
        // Group files by directory for deep nesting
        const directoryGroups = new Map();
        const shallowFiles = [];
        for (const [filePath, fileData] of fileEntries) {
            const relPath = relative(resolvedPath, filePath);
            const content = fileData.data;
            // Size in bytes: strings are measured as UTF-8, buffers by length.
            const size = typeof content === "string"
                ? Buffer.byteLength(content, "utf8")
                : content.length;
            totalUnpackedSize += size;
            // Check if file is deeply nested (3+ levels)
            const parts = relPath.split(sep);
            if (parts.length > 3) {
                // Group by the first 3 directory levels
                const groupKey = parts.slice(0, 3).join("/");
                if (!directoryGroups.has(groupKey)) {
                    directoryGroups.set(groupKey, { files: [], totalSize: 0 });
                }
                const group = directoryGroups.get(groupKey);
                group.files.push(relPath);
                group.totalSize += size;
            }
            else {
                shallowFiles.push({ path: relPath, size });
            }
        }
        // Print shallow files first
        for (const { path, size } of shallowFiles) {
            logger.log(`${formatFileSize(size).padStart(8)} ${path}`);
        }
        // Print grouped directories
        for (const [dir, { files, totalSize }] of directoryGroups) {
            if (files.length === 1) {
                // If only one file in the group, print it normally
                const filePath = files[0];
                const fileSize = totalSize;
                logger.log(`${formatFileSize(fileSize).padStart(8)} ${filePath}`);
            }
            else {
                // Print directory summary
                logger.log(`${formatFileSize(totalSize).padStart(8)} ${dir}/ [and ${files.length} more files]`);
            }
        }
        // Create zip with preserved file permissions
        const zipFiles = {};
        const isUnix = process.platform !== "win32";
        for (const [filePath, fileData] of Object.entries(files)) {
            if (isUnix) {
                // Set external file attributes to preserve Unix permissions
                // The mode needs to be shifted to the upper 16 bits for ZIP format
                zipFiles[filePath] = [
                    fileData.data,
                    { os: 3, attrs: (fileData.mode & 0o777) << 16 },
                ];
            }
            else {
                // On Windows, use default ZIP attributes (no Unix permissions)
                zipFiles[filePath] = fileData.data;
            }
        }
        const zipData = zipSync(zipFiles, {
            level: 9, // Maximum compression
            mtime: new Date(),
        });
        // Write zip file
        writeFileSync(finalOutputPath, zipData);
        // Calculate SHA sum
        const shasum = createHash("sha1").update(zipData).digest("hex");
        // Print archive details
        const sanitizedName = sanitizeNameForFilename(manifest.name);
        const archiveName = `${sanitizedName}-${manifest.version}.mcpb`;
        logger.log("\nArchive Details");
        logger.log(`name: ${manifest.name}`);
        logger.log(`version: ${manifest.version}`);
        logger.log(`filename: ${archiveName}`);
        logger.log(`package size: ${formatFileSize(zipData.length)}`);
        logger.log(`unpacked size: ${formatFileSize(totalUnpackedSize)}`);
        logger.log(`shasum: ${shasum}`);
        logger.log(`total files: ${fileEntries.length}`);
        logger.log(`ignored (.mcpbignore) files: ${ignoredCount}`);
        logger.log(`\nOutput: ${finalOutputPath}`);
        return true;
    }
    catch (error) {
        if (error instanceof Error) {
            logger.error(`ERROR: Archive error: ${error.message}`);
        }
        else {
            logger.error("ERROR: Unknown archive error occurred");
        }
        return false;
    }
}
|
||||
101
extracted-source/node_modules/@anthropic-ai/mcpb/dist/cli/unpack.js
generated
vendored
101
extracted-source/node_modules/@anthropic-ai/mcpb/dist/cli/unpack.js
generated
vendored
@@ -1,101 +0,0 @@
|
||||
import { unzipSync } from "fflate";
|
||||
import { chmodSync, existsSync, mkdirSync, readFileSync, writeFileSync, } from "fs";
|
||||
import { join, resolve, sep } from "path";
|
||||
import { extractSignatureBlock } from "../node/sign.js";
|
||||
import { getLogger } from "../shared/log.js";
|
||||
export async function unpackExtension({ mcpbPath, outputDir, silent, }) {
|
||||
const logger = getLogger({ silent });
|
||||
const resolvedMcpbPath = resolve(mcpbPath);
|
||||
if (!existsSync(resolvedMcpbPath)) {
|
||||
logger.error(`ERROR: MCPB file not found: ${mcpbPath}`);
|
||||
return false;
|
||||
}
|
||||
const finalOutputDir = outputDir ? resolve(outputDir) : process.cwd();
|
||||
if (!existsSync(finalOutputDir)) {
|
||||
mkdirSync(finalOutputDir, { recursive: true });
|
||||
}
|
||||
try {
|
||||
const fileContent = readFileSync(resolvedMcpbPath);
|
||||
const { originalContent } = extractSignatureBlock(fileContent);
|
||||
// Parse file attributes from ZIP central directory
|
||||
const fileAttributes = new Map();
|
||||
const isUnix = process.platform !== "win32";
|
||||
if (isUnix) {
|
||||
// Parse ZIP central directory to extract file attributes
|
||||
const zipBuffer = originalContent;
|
||||
// Find end of central directory record
|
||||
let eocdOffset = -1;
|
||||
for (let i = zipBuffer.length - 22; i >= 0; i--) {
|
||||
if (zipBuffer.readUInt32LE(i) === 0x06054b50) {
|
||||
eocdOffset = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (eocdOffset !== -1) {
|
||||
const centralDirOffset = zipBuffer.readUInt32LE(eocdOffset + 16);
|
||||
const centralDirEntries = zipBuffer.readUInt16LE(eocdOffset + 8);
|
||||
let offset = centralDirOffset;
|
||||
for (let i = 0; i < centralDirEntries; i++) {
|
||||
if (zipBuffer.readUInt32LE(offset) === 0x02014b50) {
|
||||
const externalAttrs = zipBuffer.readUInt32LE(offset + 38);
|
||||
const filenameLength = zipBuffer.readUInt16LE(offset + 28);
|
||||
const filename = zipBuffer.toString("utf8", offset + 46, offset + 46 + filenameLength);
|
||||
// Extract Unix permissions from external attributes (upper 16 bits)
|
||||
const mode = (externalAttrs >> 16) & 0o777;
|
||||
if (mode > 0) {
|
||||
fileAttributes.set(filename, mode);
|
||||
}
|
||||
const extraFieldLength = zipBuffer.readUInt16LE(offset + 30);
|
||||
const commentLength = zipBuffer.readUInt16LE(offset + 32);
|
||||
offset += 46 + filenameLength + extraFieldLength + commentLength;
|
||||
}
|
||||
else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
const decompressed = unzipSync(originalContent);
|
||||
for (const relativePath in decompressed) {
|
||||
if (Object.prototype.hasOwnProperty.call(decompressed, relativePath)) {
|
||||
const data = decompressed[relativePath];
|
||||
const fullPath = join(finalOutputDir, relativePath);
|
||||
// Prevent zip slip attacks by validating the resolved path
|
||||
const normalizedPath = resolve(fullPath);
|
||||
const normalizedOutputDir = resolve(finalOutputDir);
|
||||
if (!normalizedPath.startsWith(normalizedOutputDir + sep) &&
|
||||
normalizedPath !== normalizedOutputDir) {
|
||||
throw new Error(`Path traversal attempt detected: ${relativePath}`);
|
||||
}
|
||||
const dir = join(fullPath, "..");
|
||||
if (!existsSync(dir)) {
|
||||
mkdirSync(dir, { recursive: true });
|
||||
}
|
||||
writeFileSync(fullPath, data);
|
||||
// Restore Unix file permissions if available
|
||||
if (isUnix && fileAttributes.has(relativePath)) {
|
||||
try {
|
||||
const mode = fileAttributes.get(relativePath);
|
||||
if (mode !== undefined) {
|
||||
chmodSync(fullPath, mode);
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
// Silently ignore permission errors
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
logger.log(`Extension unpacked successfully to ${finalOutputDir}`);
|
||||
return true;
|
||||
}
|
||||
catch (error) {
|
||||
if (error instanceof Error) {
|
||||
logger.error(`ERROR: Failed to unpack extension: ${error.message}`);
|
||||
}
|
||||
else {
|
||||
logger.error("ERROR: An unknown error occurred during unpacking.");
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
10
extracted-source/node_modules/@anthropic-ai/mcpb/dist/index.js
generated
vendored
10
extracted-source/node_modules/@anthropic-ai/mcpb/dist/index.js
generated
vendored
@@ -1,10 +0,0 @@
|
||||
// Default export includes everything (backward compatibility)
|
||||
export * from "./cli/init.js";
|
||||
export * from "./cli/pack.js";
|
||||
export * from "./cli/unpack.js";
|
||||
export * from "./node/files.js";
|
||||
export * from "./node/sign.js";
|
||||
export * from "./node/validate.js";
|
||||
export * from "./schemas.js";
|
||||
export * from "./shared/config.js";
|
||||
export * from "./types.js";
|
||||
115
extracted-source/node_modules/@anthropic-ai/mcpb/dist/node/files.js
generated
vendored
115
extracted-source/node_modules/@anthropic-ai/mcpb/dist/node/files.js
generated
vendored
@@ -1,115 +0,0 @@
|
||||
import { existsSync, readdirSync, readFileSync, statSync } from "fs";
|
||||
import ignore from "ignore";
|
||||
import { join, relative, sep } from "path";
|
||||
// Files/patterns to exclude from the package
|
||||
export const EXCLUDE_PATTERNS = [
|
||||
".DS_Store",
|
||||
"Thumbs.db",
|
||||
".gitignore",
|
||||
".git",
|
||||
".mcpbignore",
|
||||
"*.log",
|
||||
".env*",
|
||||
".npm",
|
||||
".npmrc",
|
||||
".yarnrc",
|
||||
".yarn",
|
||||
".eslintrc",
|
||||
".editorconfig",
|
||||
".prettierrc",
|
||||
".prettierignore",
|
||||
".eslintignore",
|
||||
".nycrc",
|
||||
".babelrc",
|
||||
".pnp.*",
|
||||
"node_modules/.cache",
|
||||
"node_modules/.bin",
|
||||
"*.map",
|
||||
".env.local",
|
||||
".env.*.local",
|
||||
"npm-debug.log*",
|
||||
"yarn-debug.log*",
|
||||
"yarn-error.log*",
|
||||
"package-lock.json",
|
||||
"yarn.lock",
|
||||
"*.mcpb",
|
||||
"*.d.ts",
|
||||
"*.tsbuildinfo",
|
||||
"tsconfig.json",
|
||||
];
|
||||
/**
|
||||
* Read and parse .mcpbignore file patterns
|
||||
*/
|
||||
export function readMcpbIgnorePatterns(baseDir) {
|
||||
const mcpbIgnorePath = join(baseDir, ".mcpbignore");
|
||||
if (!existsSync(mcpbIgnorePath)) {
|
||||
return [];
|
||||
}
|
||||
try {
|
||||
const content = readFileSync(mcpbIgnorePath, "utf-8");
|
||||
return content
|
||||
.split(/\r?\n/)
|
||||
.map((line) => line.trim())
|
||||
.filter((line) => line.length > 0 && !line.startsWith("#"));
|
||||
}
|
||||
catch (error) {
|
||||
console.warn(`Warning: Could not read .mcpbignore file: ${error instanceof Error ? error.message : "Unknown error"}`);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
function buildIgnoreChecker(additionalPatterns) {
|
||||
return ignore().add(EXCLUDE_PATTERNS).add(additionalPatterns);
|
||||
}
|
||||
/**
|
||||
* Used for testing, calls the same methods as the other ignore checks
|
||||
*/
|
||||
export function shouldExclude(filePath, additionalPatterns = []) {
|
||||
return buildIgnoreChecker(additionalPatterns).ignores(filePath);
|
||||
}
|
||||
export function getAllFiles(dirPath, baseDir = dirPath, fileList = {}, additionalPatterns = []) {
|
||||
const files = readdirSync(dirPath);
|
||||
const ignoreChecker = buildIgnoreChecker(additionalPatterns);
|
||||
for (const file of files) {
|
||||
const filePath = join(dirPath, file);
|
||||
const relativePath = relative(baseDir, filePath);
|
||||
if (ignoreChecker.ignores(relativePath)) {
|
||||
continue;
|
||||
}
|
||||
const stat = statSync(filePath);
|
||||
if (stat.isDirectory()) {
|
||||
getAllFiles(filePath, baseDir, fileList, additionalPatterns);
|
||||
}
|
||||
else {
|
||||
// Use forward slashes in zip file paths
|
||||
const zipPath = relativePath.split(sep).join("/");
|
||||
fileList[zipPath] = readFileSync(filePath);
|
||||
}
|
||||
}
|
||||
return fileList;
|
||||
}
|
||||
export function getAllFilesWithCount(dirPath, baseDir = dirPath, fileList = {}, additionalPatterns = [], ignoredCount = 0) {
|
||||
const files = readdirSync(dirPath);
|
||||
const ignoreChecker = buildIgnoreChecker(additionalPatterns);
|
||||
for (const file of files) {
|
||||
const filePath = join(dirPath, file);
|
||||
const relativePath = relative(baseDir, filePath);
|
||||
if (ignoreChecker.ignores(relativePath)) {
|
||||
ignoredCount++;
|
||||
continue;
|
||||
}
|
||||
const stat = statSync(filePath);
|
||||
if (stat.isDirectory()) {
|
||||
const result = getAllFilesWithCount(filePath, baseDir, fileList, additionalPatterns, ignoredCount);
|
||||
ignoredCount = result.ignoredCount;
|
||||
}
|
||||
else {
|
||||
// Use forward slashes in zip file paths
|
||||
const zipPath = relativePath.split(sep).join("/");
|
||||
fileList[zipPath] = {
|
||||
data: readFileSync(filePath),
|
||||
mode: stat.mode,
|
||||
};
|
||||
}
|
||||
}
|
||||
return { files: fileList, ignoredCount };
|
||||
}
|
||||
333
extracted-source/node_modules/@anthropic-ai/mcpb/dist/node/sign.js
generated
vendored
333
extracted-source/node_modules/@anthropic-ai/mcpb/dist/node/sign.js
generated
vendored
@@ -1,333 +0,0 @@
|
||||
import { execFile } from "child_process";
|
||||
import { readFileSync, writeFileSync } from "fs";
|
||||
import { mkdtemp, rm, writeFile } from "fs/promises";
|
||||
import forge from "node-forge";
|
||||
import { tmpdir } from "os";
|
||||
import { join } from "path";
|
||||
import { promisify } from "util";
|
||||
// Signature block markers
|
||||
const SIGNATURE_HEADER = "MCPB_SIG_V1";
|
||||
const SIGNATURE_FOOTER = "MCPB_SIG_END";
|
||||
const execFileAsync = promisify(execFile);
|
||||
/**
|
||||
* Signs a MCPB file with the given certificate and private key using PKCS#7
|
||||
*
|
||||
* @param mcpbPath Path to the MCPB file to sign
|
||||
* @param certPath Path to the certificate file (PEM format)
|
||||
* @param keyPath Path to the private key file (PEM format)
|
||||
* @param intermediates Optional array of intermediate certificate paths
|
||||
*/
|
||||
export function signMcpbFile(mcpbPath, certPath, keyPath, intermediates) {
|
||||
// Read the original MCPB file
|
||||
const mcpbContent = readFileSync(mcpbPath);
|
||||
// Read certificate and key
|
||||
const certificatePem = readFileSync(certPath, "utf-8");
|
||||
const privateKeyPem = readFileSync(keyPath, "utf-8");
|
||||
// Read intermediate certificates if provided
|
||||
const intermediatePems = intermediates?.map((path) => readFileSync(path, "utf-8"));
|
||||
// Create PKCS#7 signed data
|
||||
const p7 = forge.pkcs7.createSignedData();
|
||||
p7.content = forge.util.createBuffer(mcpbContent);
|
||||
// Parse and add certificates
|
||||
const signingCert = forge.pki.certificateFromPem(certificatePem);
|
||||
const privateKey = forge.pki.privateKeyFromPem(privateKeyPem);
|
||||
p7.addCertificate(signingCert);
|
||||
// Add intermediate certificates
|
||||
if (intermediatePems) {
|
||||
for (const pem of intermediatePems) {
|
||||
p7.addCertificate(forge.pki.certificateFromPem(pem));
|
||||
}
|
||||
}
|
||||
// Add signer
|
||||
p7.addSigner({
|
||||
key: privateKey,
|
||||
certificate: signingCert,
|
||||
digestAlgorithm: forge.pki.oids.sha256,
|
||||
authenticatedAttributes: [
|
||||
{
|
||||
type: forge.pki.oids.contentType,
|
||||
value: forge.pki.oids.data,
|
||||
},
|
||||
{
|
||||
type: forge.pki.oids.messageDigest,
|
||||
// Value will be auto-populated
|
||||
},
|
||||
{
|
||||
type: forge.pki.oids.signingTime,
|
||||
// Value will be auto-populated with current time
|
||||
},
|
||||
],
|
||||
});
|
||||
// Sign with detached signature
|
||||
p7.sign({ detached: true });
|
||||
// Convert to DER format
|
||||
const asn1 = forge.asn1.toDer(p7.toAsn1());
|
||||
const pkcs7Signature = Buffer.from(asn1.getBytes(), "binary");
|
||||
// Create signature block with PKCS#7 data
|
||||
const signatureBlock = createSignatureBlock(pkcs7Signature);
|
||||
// Append signature block to MCPB file
|
||||
const signedContent = Buffer.concat([mcpbContent, signatureBlock]);
|
||||
writeFileSync(mcpbPath, signedContent);
|
||||
}
|
||||
/**
|
||||
* Verifies a signed MCPB file using OS certificate store
|
||||
*
|
||||
* @param mcpbPath Path to the signed MCPB file
|
||||
* @returns Signature information including verification status
|
||||
*/
|
||||
export async function verifyMcpbFile(mcpbPath) {
|
||||
try {
|
||||
const fileContent = readFileSync(mcpbPath);
|
||||
// Find and extract signature block
|
||||
const { originalContent, pkcs7Signature } = extractSignatureBlock(fileContent);
|
||||
if (!pkcs7Signature) {
|
||||
return { status: "unsigned" };
|
||||
}
|
||||
// Parse PKCS#7 signature
|
||||
const asn1 = forge.asn1.fromDer(pkcs7Signature.toString("binary"));
|
||||
const p7Message = forge.pkcs7.messageFromAsn1(asn1);
|
||||
// Verify it's signed data and cast to correct type
|
||||
if (!("type" in p7Message) ||
|
||||
p7Message.type !== forge.pki.oids.signedData) {
|
||||
return { status: "unsigned" };
|
||||
}
|
||||
// Now we know it's PkcsSignedData. The types are incorrect, so we'll
|
||||
// fix them there
|
||||
const p7 = p7Message;
|
||||
// Extract certificates from PKCS#7
|
||||
const certificates = p7.certificates || [];
|
||||
if (certificates.length === 0) {
|
||||
return { status: "unsigned" };
|
||||
}
|
||||
// Get the signing certificate (first one)
|
||||
const signingCert = certificates[0];
|
||||
// Verify PKCS#7 signature
|
||||
const contentBuf = forge.util.createBuffer(originalContent);
|
||||
try {
|
||||
p7.verify({ authenticatedAttributes: true });
|
||||
// Also verify the content matches
|
||||
const signerInfos = p7.signerInfos;
|
||||
const signerInfo = signerInfos?.[0];
|
||||
if (signerInfo) {
|
||||
const md = forge.md.sha256.create();
|
||||
md.update(contentBuf.getBytes());
|
||||
const digest = md.digest().getBytes();
|
||||
// Find the message digest attribute
|
||||
let messageDigest = null;
|
||||
for (const attr of signerInfo.authenticatedAttributes) {
|
||||
if (attr.type === forge.pki.oids.messageDigest) {
|
||||
messageDigest = attr.value;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!messageDigest || messageDigest !== digest) {
|
||||
return { status: "unsigned" };
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
return { status: "unsigned" };
|
||||
}
|
||||
// Convert forge certificate to PEM for OS verification
|
||||
const certPem = forge.pki.certificateToPem(signingCert);
|
||||
const intermediatePems = certificates
|
||||
.slice(1)
|
||||
.map((cert) => Buffer.from(forge.pki.certificateToPem(cert)));
|
||||
// Verify certificate chain against OS trust store
|
||||
const chainValid = await verifyCertificateChain(Buffer.from(certPem), intermediatePems);
|
||||
if (!chainValid) {
|
||||
// Signature is valid but certificate is not trusted
|
||||
return { status: "unsigned" };
|
||||
}
|
||||
// Extract certificate info
|
||||
const isSelfSigned = signingCert.issuer.getField("CN")?.value ===
|
||||
signingCert.subject.getField("CN")?.value;
|
||||
return {
|
||||
status: isSelfSigned ? "self-signed" : "signed",
|
||||
publisher: signingCert.subject.getField("CN")?.value || "Unknown",
|
||||
issuer: signingCert.issuer.getField("CN")?.value || "Unknown",
|
||||
valid_from: signingCert.validity.notBefore.toISOString(),
|
||||
valid_to: signingCert.validity.notAfter.toISOString(),
|
||||
fingerprint: forge.md.sha256
|
||||
.create()
|
||||
.update(forge.asn1.toDer(forge.pki.certificateToAsn1(signingCert)).getBytes())
|
||||
.digest()
|
||||
.toHex(),
|
||||
};
|
||||
}
|
||||
catch (error) {
|
||||
throw new Error(`Failed to verify MCPB file: ${error}`);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Creates a signature block buffer with PKCS#7 signature
|
||||
*/
|
||||
function createSignatureBlock(pkcs7Signature) {
|
||||
const parts = [];
|
||||
// Header
|
||||
parts.push(Buffer.from(SIGNATURE_HEADER, "utf-8"));
|
||||
// PKCS#7 signature length and data
|
||||
const sigLengthBuffer = Buffer.alloc(4);
|
||||
sigLengthBuffer.writeUInt32LE(pkcs7Signature.length, 0);
|
||||
parts.push(sigLengthBuffer);
|
||||
parts.push(pkcs7Signature);
|
||||
// Footer
|
||||
parts.push(Buffer.from(SIGNATURE_FOOTER, "utf-8"));
|
||||
return Buffer.concat(parts);
|
||||
}
|
||||
/**
|
||||
* Extracts the signature block from a signed MCPB file
|
||||
*/
|
||||
export function extractSignatureBlock(fileContent) {
|
||||
// Look for signature footer at the end
|
||||
const footerBytes = Buffer.from(SIGNATURE_FOOTER, "utf-8");
|
||||
const footerIndex = fileContent.lastIndexOf(footerBytes);
|
||||
if (footerIndex === -1) {
|
||||
return { originalContent: fileContent };
|
||||
}
|
||||
// Look for signature header before footer
|
||||
const headerBytes = Buffer.from(SIGNATURE_HEADER, "utf-8");
|
||||
let headerIndex = -1;
|
||||
// Search backwards from footer
|
||||
for (let i = footerIndex - 1; i >= 0; i--) {
|
||||
if (fileContent.slice(i, i + headerBytes.length).equals(headerBytes)) {
|
||||
headerIndex = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (headerIndex === -1) {
|
||||
return { originalContent: fileContent };
|
||||
}
|
||||
// Extract original content (everything before signature block)
|
||||
const originalContent = fileContent.slice(0, headerIndex);
|
||||
// Parse signature block
|
||||
let offset = headerIndex + headerBytes.length;
|
||||
try {
|
||||
// Read PKCS#7 signature length
|
||||
const sigLength = fileContent.readUInt32LE(offset);
|
||||
offset += 4;
|
||||
// Read PKCS#7 signature
|
||||
const pkcs7Signature = fileContent.slice(offset, offset + sigLength);
|
||||
return {
|
||||
originalContent,
|
||||
pkcs7Signature,
|
||||
};
|
||||
}
|
||||
catch {
|
||||
return { originalContent: fileContent };
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Verifies certificate chain against OS trust store
|
||||
*/
|
||||
export async function verifyCertificateChain(certificate, intermediates) {
|
||||
let tempDir = null;
|
||||
try {
|
||||
tempDir = await mkdtemp(join(tmpdir(), "mcpb-verify-"));
|
||||
const certChainPath = join(tempDir, "chain.pem");
|
||||
const certChain = [certificate, ...(intermediates || [])].join("\n");
|
||||
await writeFile(certChainPath, certChain);
|
||||
// Platform-specific verification
|
||||
if (process.platform === "darwin") {
|
||||
try {
|
||||
await execFileAsync("security", [
|
||||
"verify-cert",
|
||||
"-c",
|
||||
certChainPath,
|
||||
"-p",
|
||||
"codeSign",
|
||||
]);
|
||||
return true;
|
||||
}
|
||||
catch (error) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
else if (process.platform === "win32") {
|
||||
const psCommand = `
|
||||
$ErrorActionPreference = 'Stop'
|
||||
$certCollection = New-Object System.Security.Cryptography.X509Certificates.X509Certificate2Collection
|
||||
$certCollection.Import('${certChainPath}')
|
||||
|
||||
if ($certCollection.Count -eq 0) {
|
||||
Write-Error 'No certificates found'
|
||||
exit 1
|
||||
}
|
||||
|
||||
$leafCert = $certCollection[0]
|
||||
$chain = New-Object System.Security.Cryptography.X509Certificates.X509Chain
|
||||
|
||||
# Enable revocation checking
|
||||
$chain.ChainPolicy.RevocationMode = 'Online'
|
||||
$chain.ChainPolicy.RevocationFlag = 'EntireChain'
|
||||
$chain.ChainPolicy.UrlRetrievalTimeout = New-TimeSpan -Seconds 30
|
||||
|
||||
# Add code signing application policy
|
||||
$codeSignOid = New-Object System.Security.Cryptography.Oid '1.3.6.1.5.5.7.3.3'
|
||||
$chain.ChainPolicy.ApplicationPolicy.Add($codeSignOid)
|
||||
|
||||
# Add intermediate certificates to extra store
|
||||
for ($i = 1; $i -lt $certCollection.Count; $i++) {
|
||||
[void]$chain.ChainPolicy.ExtraStore.Add($certCollection[$i])
|
||||
}
|
||||
|
||||
# Build and validate chain
|
||||
$result = $chain.Build($leafCert)
|
||||
|
||||
if ($result) {
|
||||
'Valid'
|
||||
} else {
|
||||
$chain.ChainStatus | ForEach-Object {
|
||||
Write-Error "$($_.Status): $($_.StatusInformation)"
|
||||
}
|
||||
exit 1
|
||||
}
|
||||
`.trim();
|
||||
const { stdout } = await execFileAsync("powershell.exe", [
|
||||
"-NoProfile",
|
||||
"-NonInteractive",
|
||||
"-Command",
|
||||
psCommand,
|
||||
]);
|
||||
return stdout.includes("Valid");
|
||||
}
|
||||
else {
|
||||
// Linux: Use openssl
|
||||
try {
|
||||
await execFileAsync("openssl", [
|
||||
"verify",
|
||||
"-purpose",
|
||||
"codesigning",
|
||||
"-CApath",
|
||||
"/etc/ssl/certs",
|
||||
certChainPath,
|
||||
]);
|
||||
return true;
|
||||
}
|
||||
catch (error) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
return false;
|
||||
}
|
||||
finally {
|
||||
if (tempDir) {
|
||||
try {
|
||||
await rm(tempDir, { recursive: true, force: true });
|
||||
}
|
||||
catch {
|
||||
// Ignore cleanup errors
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Removes signature from a MCPB file
|
||||
*/
|
||||
export function unsignMcpbFile(mcpbPath) {
|
||||
const fileContent = readFileSync(mcpbPath);
|
||||
const { originalContent } = extractSignatureBlock(fileContent);
|
||||
writeFileSync(mcpbPath, originalContent);
|
||||
}
|
||||
124
extracted-source/node_modules/@anthropic-ai/mcpb/dist/node/validate.js
generated
vendored
124
extracted-source/node_modules/@anthropic-ai/mcpb/dist/node/validate.js
generated
vendored
@@ -1,124 +0,0 @@
|
||||
import { existsSync, readFileSync, statSync } from "fs";
|
||||
import * as fs from "fs/promises";
|
||||
import { DestroyerOfModules } from "galactus";
|
||||
import * as os from "os";
|
||||
import { join, resolve } from "path";
|
||||
import prettyBytes from "pretty-bytes";
|
||||
import { unpackExtension } from "../cli/unpack.js";
|
||||
import { McpbManifestSchema } from "../schemas.js";
|
||||
import { McpbManifestSchema as LooseMcpbManifestSchema } from "../schemas-loose.js";
|
||||
export function validateManifest(inputPath) {
|
||||
try {
|
||||
const resolvedPath = resolve(inputPath);
|
||||
let manifestPath = resolvedPath;
|
||||
// If input is a directory, look for manifest.json inside it
|
||||
if (existsSync(resolvedPath) && statSync(resolvedPath).isDirectory()) {
|
||||
manifestPath = join(resolvedPath, "manifest.json");
|
||||
}
|
||||
const manifestContent = readFileSync(manifestPath, "utf-8");
|
||||
const manifestData = JSON.parse(manifestContent);
|
||||
const result = McpbManifestSchema.safeParse(manifestData);
|
||||
if (result.success) {
|
||||
console.log("Manifest schema validation passes!");
|
||||
return true;
|
||||
}
|
||||
else {
|
||||
console.log("ERROR: Manifest validation failed:\n");
|
||||
result.error.issues.forEach((issue) => {
|
||||
const path = issue.path.join(".");
|
||||
console.log(` - ${path ? `${path}: ` : ""}${issue.message}`);
|
||||
});
|
||||
return false;
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
if (error instanceof Error) {
|
||||
if (error.message.includes("ENOENT")) {
|
||||
console.error(`ERROR: File not found: ${inputPath}`);
|
||||
if (existsSync(resolve(inputPath)) &&
|
||||
statSync(resolve(inputPath)).isDirectory()) {
|
||||
console.error(` (No manifest.json found in directory)`);
|
||||
}
|
||||
}
|
||||
else if (error.message.includes("JSON")) {
|
||||
console.error(`ERROR: Invalid JSON in manifest file: ${error.message}`);
|
||||
}
|
||||
else {
|
||||
console.error(`ERROR: Error reading manifest: ${error.message}`);
|
||||
}
|
||||
}
|
||||
else {
|
||||
console.error("ERROR: Unknown error occurred");
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
export async function cleanMcpb(inputPath) {
|
||||
const tmpDir = await fs.mkdtemp(resolve(os.tmpdir(), "mcpb-clean-"));
|
||||
const mcpbPath = resolve(tmpDir, "in.mcpb");
|
||||
const unpackPath = resolve(tmpDir, "out");
|
||||
console.log(" -- Cleaning MCPB...");
|
||||
try {
|
||||
await fs.copyFile(inputPath, mcpbPath);
|
||||
console.log(" -- Unpacking MCPB...");
|
||||
await unpackExtension({ mcpbPath, silent: true, outputDir: unpackPath });
|
||||
const manifestPath = resolve(unpackPath, "manifest.json");
|
||||
const originalManifest = await fs.readFile(manifestPath, "utf-8");
|
||||
const manifestData = JSON.parse(originalManifest);
|
||||
const result = LooseMcpbManifestSchema.safeParse(manifestData);
|
||||
if (!result.success) {
|
||||
throw new Error(`Unrecoverable manifest issues, please run "mcpb validate"`);
|
||||
}
|
||||
await fs.writeFile(manifestPath, JSON.stringify(result.data, null, 2));
|
||||
if (originalManifest.trim() !==
|
||||
(await fs.readFile(manifestPath, "utf8")).trim()) {
|
||||
console.log(" -- Update manifest to be valid per MCPB schema");
|
||||
}
|
||||
else {
|
||||
console.log(" -- Manifest already valid per MCPB schema");
|
||||
}
|
||||
const nodeModulesPath = resolve(unpackPath, "node_modules");
|
||||
if (existsSync(nodeModulesPath)) {
|
||||
console.log(" -- node_modules found, deleting development dependencies");
|
||||
const destroyer = new DestroyerOfModules({
|
||||
rootDirectory: unpackPath,
|
||||
});
|
||||
try {
|
||||
await destroyer.destroy();
|
||||
}
|
||||
catch (error) {
|
||||
// If modules have already been deleted in a previous clean, the walker
|
||||
// will fail when it can't find required dependencies. This is expected
|
||||
// and safe to ignore.
|
||||
if (error instanceof Error &&
|
||||
error.message.includes("Failed to locate module")) {
|
||||
console.log(" -- Some modules already removed, skipping remaining cleanup");
|
||||
}
|
||||
else {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
console.log(" -- Removed development dependencies from node_modules");
|
||||
}
|
||||
else {
|
||||
console.log(" -- No node_modules, not pruning");
|
||||
}
|
||||
const before = await fs.stat(inputPath);
|
||||
const { packExtension } = await import("../cli/pack.js");
|
||||
await packExtension({
|
||||
extensionPath: unpackPath,
|
||||
outputPath: inputPath,
|
||||
silent: true,
|
||||
});
|
||||
const after = await fs.stat(inputPath);
|
||||
console.log("\nClean Complete:");
|
||||
console.log("Before:", prettyBytes(before.size));
|
||||
console.log("After:", prettyBytes(after.size));
|
||||
}
|
||||
finally {
|
||||
await fs.rm(tmpDir, {
|
||||
recursive: true,
|
||||
force: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
105
extracted-source/node_modules/@anthropic-ai/mcpb/dist/schemas-loose.js
generated
vendored
105
extracted-source/node_modules/@anthropic-ai/mcpb/dist/schemas-loose.js
generated
vendored
@@ -1,105 +0,0 @@
|
||||
import * as z from "zod";
|
||||
export const McpServerConfigSchema = z.object({
|
||||
command: z.string(),
|
||||
args: z.array(z.string()).optional(),
|
||||
env: z.record(z.string(), z.string()).optional(),
|
||||
});
|
||||
export const McpbManifestAuthorSchema = z.object({
|
||||
name: z.string(),
|
||||
email: z.string().email().optional(),
|
||||
url: z.string().url().optional(),
|
||||
});
|
||||
export const McpbManifestRepositorySchema = z.object({
|
||||
type: z.string(),
|
||||
url: z.string().url(),
|
||||
});
|
||||
export const McpbManifestPlatformOverrideSchema = McpServerConfigSchema.partial();
|
||||
export const McpbManifestMcpConfigSchema = McpServerConfigSchema.extend({
|
||||
platform_overrides: z
|
||||
.record(z.string(), McpbManifestPlatformOverrideSchema)
|
||||
.optional(),
|
||||
});
|
||||
export const McpbManifestServerSchema = z.object({
|
||||
type: z.enum(["python", "node", "binary"]),
|
||||
entry_point: z.string(),
|
||||
mcp_config: McpbManifestMcpConfigSchema,
|
||||
});
|
||||
export const McpbManifestCompatibilitySchema = z
|
||||
.object({
|
||||
claude_desktop: z.string().optional(),
|
||||
platforms: z.array(z.enum(["darwin", "win32", "linux"])).optional(),
|
||||
runtimes: z
|
||||
.object({
|
||||
python: z.string().optional(),
|
||||
node: z.string().optional(),
|
||||
})
|
||||
.optional(),
|
||||
})
|
||||
.passthrough();
|
||||
export const McpbManifestToolSchema = z.object({
|
||||
name: z.string(),
|
||||
description: z.string().optional(),
|
||||
});
|
||||
export const McpbManifestPromptSchema = z.object({
|
||||
name: z.string(),
|
||||
description: z.string().optional(),
|
||||
arguments: z.array(z.string()).optional(),
|
||||
text: z.string(),
|
||||
});
|
||||
export const McpbUserConfigurationOptionSchema = z.object({
|
||||
type: z.enum(["string", "number", "boolean", "directory", "file"]),
|
||||
title: z.string(),
|
||||
description: z.string(),
|
||||
required: z.boolean().optional(),
|
||||
default: z
|
||||
.union([z.string(), z.number(), z.boolean(), z.array(z.string())])
|
||||
.optional(),
|
||||
multiple: z.boolean().optional(),
|
||||
sensitive: z.boolean().optional(),
|
||||
min: z.number().optional(),
|
||||
max: z.number().optional(),
|
||||
});
|
||||
export const McpbUserConfigValuesSchema = z.record(z.string(), z.union([z.string(), z.number(), z.boolean(), z.array(z.string())]));
|
||||
export const McpbManifestSchema = z
|
||||
.object({
|
||||
$schema: z.string().optional(),
|
||||
dxt_version: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe("@deprecated Use manifest_version instead"),
|
||||
manifest_version: z.string().optional(),
|
||||
name: z.string(),
|
||||
display_name: z.string().optional(),
|
||||
version: z.string(),
|
||||
description: z.string(),
|
||||
long_description: z.string().optional(),
|
||||
author: McpbManifestAuthorSchema,
|
||||
repository: McpbManifestRepositorySchema.optional(),
|
||||
homepage: z.string().url().optional(),
|
||||
documentation: z.string().url().optional(),
|
||||
support: z.string().url().optional(),
|
||||
icon: z.string().optional(),
|
||||
screenshots: z.array(z.string()).optional(),
|
||||
server: McpbManifestServerSchema,
|
||||
tools: z.array(McpbManifestToolSchema).optional(),
|
||||
tools_generated: z.boolean().optional(),
|
||||
prompts: z.array(McpbManifestPromptSchema).optional(),
|
||||
prompts_generated: z.boolean().optional(),
|
||||
keywords: z.array(z.string()).optional(),
|
||||
license: z.string().optional(),
|
||||
compatibility: McpbManifestCompatibilitySchema.optional(),
|
||||
user_config: z
|
||||
.record(z.string(), McpbUserConfigurationOptionSchema)
|
||||
.optional(),
|
||||
})
|
||||
.refine((data) => !!(data.dxt_version || data.manifest_version), {
|
||||
message: "Either 'dxt_version' (deprecated) or 'manifest_version' must be provided",
|
||||
});
|
||||
export const McpbSignatureInfoSchema = z.object({
|
||||
status: z.enum(["signed", "unsigned", "self-signed"]),
|
||||
publisher: z.string().optional(),
|
||||
issuer: z.string().optional(),
|
||||
valid_from: z.string().optional(),
|
||||
valid_to: z.string().optional(),
|
||||
fingerprint: z.string().optional(),
|
||||
});
|
||||
107
extracted-source/node_modules/@anthropic-ai/mcpb/dist/schemas.js
generated
vendored
107
extracted-source/node_modules/@anthropic-ai/mcpb/dist/schemas.js
generated
vendored
@@ -1,107 +0,0 @@
|
||||
import * as z from "zod";
|
||||
export const CURRENT_MANIFEST_VERSION = "0.2";
|
||||
export const McpServerConfigSchema = z.strictObject({
|
||||
command: z.string(),
|
||||
args: z.array(z.string()).optional(),
|
||||
env: z.record(z.string(), z.string()).optional(),
|
||||
});
|
||||
export const McpbManifestAuthorSchema = z.strictObject({
|
||||
name: z.string(),
|
||||
email: z.string().email().optional(),
|
||||
url: z.string().url().optional(),
|
||||
});
|
||||
export const McpbManifestRepositorySchema = z.strictObject({
|
||||
type: z.string(),
|
||||
url: z.string().url(),
|
||||
});
|
||||
export const McpbManifestPlatformOverrideSchema = McpServerConfigSchema.partial();
|
||||
export const McpbManifestMcpConfigSchema = McpServerConfigSchema.extend({
|
||||
platform_overrides: z
|
||||
.record(z.string(), McpbManifestPlatformOverrideSchema)
|
||||
.optional(),
|
||||
});
|
||||
export const McpbManifestServerSchema = z.strictObject({
|
||||
type: z.enum(["python", "node", "binary"]),
|
||||
entry_point: z.string(),
|
||||
mcp_config: McpbManifestMcpConfigSchema,
|
||||
});
|
||||
export const McpbManifestCompatibilitySchema = z
|
||||
.strictObject({
|
||||
claude_desktop: z.string().optional(),
|
||||
platforms: z.array(z.enum(["darwin", "win32", "linux"])).optional(),
|
||||
runtimes: z
|
||||
.strictObject({
|
||||
python: z.string().optional(),
|
||||
node: z.string().optional(),
|
||||
})
|
||||
.optional(),
|
||||
})
|
||||
.passthrough();
|
||||
export const McpbManifestToolSchema = z.strictObject({
|
||||
name: z.string(),
|
||||
description: z.string().optional(),
|
||||
});
|
||||
export const McpbManifestPromptSchema = z.strictObject({
|
||||
name: z.string(),
|
||||
description: z.string().optional(),
|
||||
arguments: z.array(z.string()).optional(),
|
||||
text: z.string(),
|
||||
});
|
||||
export const McpbUserConfigurationOptionSchema = z.strictObject({
|
||||
type: z.enum(["string", "number", "boolean", "directory", "file"]),
|
||||
title: z.string(),
|
||||
description: z.string(),
|
||||
required: z.boolean().optional(),
|
||||
default: z
|
||||
.union([z.string(), z.number(), z.boolean(), z.array(z.string())])
|
||||
.optional(),
|
||||
multiple: z.boolean().optional(),
|
||||
sensitive: z.boolean().optional(),
|
||||
min: z.number().optional(),
|
||||
max: z.number().optional(),
|
||||
});
|
||||
export const McpbUserConfigValuesSchema = z.record(z.string(), z.union([z.string(), z.number(), z.boolean(), z.array(z.string())]));
|
||||
export const McpbManifestSchema = z
|
||||
.strictObject({
|
||||
$schema: z.string().optional(),
|
||||
dxt_version: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe("@deprecated Use manifest_version instead"),
|
||||
manifest_version: z.string().optional(),
|
||||
name: z.string(),
|
||||
display_name: z.string().optional(),
|
||||
version: z.string(),
|
||||
description: z.string(),
|
||||
long_description: z.string().optional(),
|
||||
author: McpbManifestAuthorSchema,
|
||||
repository: McpbManifestRepositorySchema.optional(),
|
||||
homepage: z.string().url().optional(),
|
||||
documentation: z.string().url().optional(),
|
||||
support: z.string().url().optional(),
|
||||
icon: z.string().optional(),
|
||||
screenshots: z.array(z.string()).optional(),
|
||||
server: McpbManifestServerSchema,
|
||||
tools: z.array(McpbManifestToolSchema).optional(),
|
||||
tools_generated: z.boolean().optional(),
|
||||
prompts: z.array(McpbManifestPromptSchema).optional(),
|
||||
prompts_generated: z.boolean().optional(),
|
||||
keywords: z.array(z.string()).optional(),
|
||||
license: z.string().optional(),
|
||||
privacy_policies: z.array(z.string()).optional(),
|
||||
compatibility: McpbManifestCompatibilitySchema.optional(),
|
||||
user_config: z
|
||||
.record(z.string(), McpbUserConfigurationOptionSchema)
|
||||
.optional(),
|
||||
})
|
||||
.refine((data) => !!(data.dxt_version || data.manifest_version), {
|
||||
message: "Either 'dxt_version' (deprecated) or 'manifest_version' must be provided",
|
||||
});
|
||||
export const McpbSignatureInfoSchema = z.strictObject({
|
||||
status: z.enum(["signed", "unsigned", "self-signed"]),
|
||||
publisher: z.string().optional(),
|
||||
issuer: z.string().optional(),
|
||||
valid_from: z.string().optional(),
|
||||
valid_to: z.string().optional(),
|
||||
fingerprint: z.string().optional(),
|
||||
});
|
||||
157
extracted-source/node_modules/@anthropic-ai/mcpb/dist/shared/config.js
generated
vendored
157
extracted-source/node_modules/@anthropic-ai/mcpb/dist/shared/config.js
generated
vendored
@@ -1,157 +0,0 @@
|
||||
/**
|
||||
* This file contains utility functions for handling MCPB configuration,
|
||||
* including variable replacement and MCP server configuration generation.
|
||||
*/
|
||||
/**
|
||||
* Recursively replaces variables in any value. Handles strings, arrays, and objects.
|
||||
*
|
||||
* @param value The value to process
|
||||
* @param variables Object containing variable replacements
|
||||
* @returns The processed value with all variables replaced
|
||||
*/
|
||||
export function replaceVariables(value, variables) {
|
||||
if (typeof value === "string") {
|
||||
let result = value;
|
||||
// Replace all variables in the string
|
||||
for (const [key, replacement] of Object.entries(variables)) {
|
||||
const pattern = new RegExp(`\\$\\{${key}\\}`, "g");
|
||||
// Check if this pattern actually exists in the string
|
||||
if (result.match(pattern)) {
|
||||
if (Array.isArray(replacement)) {
|
||||
console.warn(`Cannot replace ${key} with array value in string context: "${value}"`, { key, replacement });
|
||||
}
|
||||
else {
|
||||
result = result.replace(pattern, replacement);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
else if (Array.isArray(value)) {
|
||||
// For arrays, we need to handle special case of array expansion
|
||||
const result = [];
|
||||
for (const item of value) {
|
||||
if (typeof item === "string" &&
|
||||
item.match(/^\$\{user_config\.[^}]+\}$/)) {
|
||||
// This is a user config variable that might expand to multiple values
|
||||
const varName = item.match(/^\$\{([^}]+)\}$/)?.[1];
|
||||
if (varName && variables[varName]) {
|
||||
const replacement = variables[varName];
|
||||
if (Array.isArray(replacement)) {
|
||||
// Expand array inline
|
||||
result.push(...replacement);
|
||||
}
|
||||
else {
|
||||
result.push(replacement);
|
||||
}
|
||||
}
|
||||
else {
|
||||
// Variable not found, keep original
|
||||
result.push(item);
|
||||
}
|
||||
}
|
||||
else {
|
||||
// Recursively process non-variable items
|
||||
result.push(replaceVariables(item, variables));
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
else if (value && typeof value === "object") {
|
||||
const result = {};
|
||||
for (const [key, val] of Object.entries(value)) {
|
||||
result[key] = replaceVariables(val, variables);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
return value;
|
||||
}
|
||||
export async function getMcpConfigForManifest(options) {
|
||||
const { manifest, extensionPath, systemDirs, userConfig, pathSeparator, logger, } = options;
|
||||
const baseConfig = manifest.server?.mcp_config;
|
||||
if (!baseConfig) {
|
||||
return undefined;
|
||||
}
|
||||
let result = {
|
||||
...baseConfig,
|
||||
};
|
||||
if (baseConfig.platform_overrides) {
|
||||
if (process.platform in baseConfig.platform_overrides) {
|
||||
const platformConfig = baseConfig.platform_overrides[process.platform];
|
||||
result.command = platformConfig.command || result.command;
|
||||
result.args = platformConfig.args || result.args;
|
||||
result.env = platformConfig.env || result.env;
|
||||
}
|
||||
}
|
||||
// Check if required configuration is missing
|
||||
if (hasRequiredConfigMissing({ manifest, userConfig })) {
|
||||
logger?.warn(`Extension ${manifest.name} has missing required configuration, skipping MCP config`);
|
||||
return undefined;
|
||||
}
|
||||
const variables = {
|
||||
__dirname: extensionPath,
|
||||
pathSeparator,
|
||||
"/": pathSeparator,
|
||||
...systemDirs,
|
||||
};
|
||||
// Build merged configuration from defaults and user settings
|
||||
const mergedConfig = {};
|
||||
// First, add defaults from manifest
|
||||
if (manifest.user_config) {
|
||||
for (const [key, configOption] of Object.entries(manifest.user_config)) {
|
||||
if (configOption.default !== undefined) {
|
||||
mergedConfig[key] = configOption.default;
|
||||
}
|
||||
}
|
||||
}
|
||||
// Then, override with user settings
|
||||
if (userConfig) {
|
||||
Object.assign(mergedConfig, userConfig);
|
||||
}
|
||||
// Add merged configuration variables for substitution
|
||||
for (const [key, value] of Object.entries(mergedConfig)) {
|
||||
// Convert user config to the format expected by variable substitution
|
||||
const userConfigKey = `user_config.${key}`;
|
||||
if (Array.isArray(value)) {
|
||||
// Keep arrays as arrays for proper expansion
|
||||
variables[userConfigKey] = value.map(String);
|
||||
}
|
||||
else if (typeof value === "boolean") {
|
||||
// Convert booleans to "true"/"false" strings as per spec
|
||||
variables[userConfigKey] = value ? "true" : "false";
|
||||
}
|
||||
else {
|
||||
// Convert other types to strings
|
||||
variables[userConfigKey] = String(value);
|
||||
}
|
||||
}
|
||||
// Replace all variables in the config
|
||||
result = replaceVariables(result, variables);
|
||||
return result;
|
||||
}
|
||||
function isInvalidSingleValue(value) {
|
||||
return value === undefined || value === null || value === "";
|
||||
}
|
||||
/**
|
||||
* Check if an extension has missing required configuration
|
||||
* @param manifest The extension manifest
|
||||
* @param userConfig The user configuration
|
||||
* @returns true if required configuration is missing
|
||||
*/
|
||||
export function hasRequiredConfigMissing({ manifest, userConfig, }) {
|
||||
if (!manifest.user_config) {
|
||||
return false;
|
||||
}
|
||||
const config = userConfig || {};
|
||||
for (const [key, configOption] of Object.entries(manifest.user_config)) {
|
||||
if (configOption.required) {
|
||||
const value = config[key];
|
||||
if (isInvalidSingleValue(value) ||
|
||||
(Array.isArray(value) &&
|
||||
(value.length === 0 || value.some(isInvalidSingleValue)))) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
29
extracted-source/node_modules/@anthropic-ai/mcpb/dist/shared/log.js
generated
vendored
29
extracted-source/node_modules/@anthropic-ai/mcpb/dist/shared/log.js
generated
vendored
@@ -1,29 +0,0 @@
|
||||
export function getLogger({ silent = false } = {}) {
|
||||
return {
|
||||
log: (...args) => {
|
||||
if (!silent) {
|
||||
console.log(...args);
|
||||
}
|
||||
},
|
||||
error: (...args) => {
|
||||
if (!silent) {
|
||||
console.error(...args);
|
||||
}
|
||||
},
|
||||
warn: (...args) => {
|
||||
if (!silent) {
|
||||
console.warn(...args);
|
||||
}
|
||||
},
|
||||
info: (...args) => {
|
||||
if (!silent) {
|
||||
console.info(...args);
|
||||
}
|
||||
},
|
||||
debug: (...args) => {
|
||||
if (!silent) {
|
||||
console.debug(...args);
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
9
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/index.js
generated
vendored
9
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/index.js
generated
vendored
@@ -1,9 +0,0 @@
|
||||
// Library exports
|
||||
export { SandboxManager } from './sandbox/sandbox-manager.js';
|
||||
export { SandboxViolationStore } from './sandbox/sandbox-violation-store.js';
|
||||
export { SandboxRuntimeConfigSchema, NetworkConfigSchema, FilesystemConfigSchema, IgnoreViolationsConfigSchema, RipgrepConfigSchema, } from './sandbox/sandbox-config.js';
|
||||
// Utility functions
|
||||
export { getDefaultWritePaths } from './sandbox/sandbox-utils.js';
|
||||
// Platform utilities
|
||||
export { getWslVersion } from './utils/platform.js';
|
||||
//# sourceMappingURL=index.js.map
|
||||
@@ -1,263 +0,0 @@
|
||||
import { join, dirname } from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
import * as fs from 'node:fs';
|
||||
import { execSync } from 'node:child_process';
|
||||
import { homedir } from 'node:os';
|
||||
import { logForDebugging } from '../utils/debug.js';
|
||||
// Cache for path lookups (key: explicit path or empty string, value: resolved path or null)
|
||||
const bpfPathCache = new Map();
|
||||
const applySeccompPathCache = new Map();
|
||||
// Cache for global npm paths (computed once per process)
|
||||
let cachedGlobalNpmPaths = null;
|
||||
/**
|
||||
* Get paths to check for globally installed @anthropic-ai/sandbox-runtime package.
|
||||
* This is used as a fallback when the binaries aren't bundled (e.g., native builds).
|
||||
*/
|
||||
function getGlobalNpmPaths() {
|
||||
if (cachedGlobalNpmPaths)
|
||||
return cachedGlobalNpmPaths;
|
||||
const paths = [];
|
||||
// Try to get the actual global npm root
|
||||
try {
|
||||
const npmRoot = execSync('npm root -g', {
|
||||
encoding: 'utf8',
|
||||
timeout: 5000,
|
||||
stdio: ['pipe', 'pipe', 'ignore'],
|
||||
}).trim();
|
||||
if (npmRoot) {
|
||||
paths.push(join(npmRoot, '@anthropic-ai', 'sandbox-runtime'));
|
||||
}
|
||||
}
|
||||
catch {
|
||||
// npm not available or failed
|
||||
}
|
||||
// Common global npm locations as fallbacks
|
||||
const home = homedir();
|
||||
paths.push(
|
||||
// npm global (Linux/macOS)
|
||||
join('/usr', 'lib', 'node_modules', '@anthropic-ai', 'sandbox-runtime'), join('/usr', 'local', 'lib', 'node_modules', '@anthropic-ai', 'sandbox-runtime'),
|
||||
// npm global with prefix (common on macOS with homebrew)
|
||||
join('/opt', 'homebrew', 'lib', 'node_modules', '@anthropic-ai', 'sandbox-runtime'),
|
||||
// User-local npm global
|
||||
join(home, '.npm', 'lib', 'node_modules', '@anthropic-ai', 'sandbox-runtime'), join(home, '.npm-global', 'lib', 'node_modules', '@anthropic-ai', 'sandbox-runtime'));
|
||||
cachedGlobalNpmPaths = paths;
|
||||
return paths;
|
||||
}
|
||||
/**
|
||||
* Map Node.js process.arch to our vendor directory architecture names
|
||||
* Returns null for unsupported architectures
|
||||
*/
|
||||
function getVendorArchitecture() {
|
||||
const arch = process.arch;
|
||||
switch (arch) {
|
||||
case 'x64':
|
||||
case 'x86_64':
|
||||
return 'x64';
|
||||
case 'arm64':
|
||||
case 'aarch64':
|
||||
return 'arm64';
|
||||
case 'ia32':
|
||||
case 'x86':
|
||||
// TODO: Add support for 32-bit x86 (ia32)
|
||||
// Currently blocked because the seccomp filter does not block the socketcall() syscall,
|
||||
// which is used on 32-bit x86 for all socket operations (socket, socketpair, bind, connect, etc.).
|
||||
// On 32-bit x86, the direct socket() syscall doesn't exist - instead, all socket operations
|
||||
// are multiplexed through socketcall(SYS_SOCKET, ...), socketcall(SYS_SOCKETPAIR, ...), etc.
|
||||
//
|
||||
// To properly support 32-bit x86, we need to:
|
||||
// 1. Build a separate i386 BPF filter (BPF bytecode is architecture-specific)
|
||||
// 2. Modify vendor/seccomp-src/seccomp-unix-block.c to conditionally add rules that block:
|
||||
// - socketcall(SYS_SOCKET, [AF_UNIX, ...])
|
||||
// - socketcall(SYS_SOCKETPAIR, [AF_UNIX, ...])
|
||||
// 3. This requires complex BPF logic to inspect socketcall's sub-function argument
|
||||
//
|
||||
// Until then, 32-bit x86 is not supported to avoid a security bypass.
|
||||
logForDebugging(`[SeccompFilter] 32-bit x86 (ia32) is not currently supported due to missing socketcall() syscall blocking. ` +
|
||||
`The current seccomp filter only blocks socket(AF_UNIX, ...), but on 32-bit x86, socketcall() can be used to bypass this.`, { level: 'error' });
|
||||
return null;
|
||||
default:
|
||||
logForDebugging(`[SeccompFilter] Unsupported architecture: ${arch}. Only x64 and arm64 are supported.`);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Get local paths to check for seccomp files (bundled or package installs).
|
||||
*/
|
||||
function getLocalSeccompPaths(filename) {
|
||||
const arch = getVendorArchitecture();
|
||||
if (!arch)
|
||||
return [];
|
||||
const baseDir = dirname(fileURLToPath(import.meta.url));
|
||||
const relativePath = join('vendor', 'seccomp', arch, filename);
|
||||
return [
|
||||
join(baseDir, relativePath), // bundled: same directory as bundle (e.g., when bundled into claude-cli)
|
||||
join(baseDir, '..', '..', relativePath), // package root: vendor/seccomp/...
|
||||
join(baseDir, '..', relativePath), // dist: dist/vendor/seccomp/...
|
||||
];
|
||||
}
|
||||
/**
|
||||
* Get the path to a pre-generated BPF filter file from the vendor directory
|
||||
* Returns the path if it exists, null otherwise
|
||||
*
|
||||
* Pre-generated BPF files are organized by architecture:
|
||||
* - vendor/seccomp/{x64,arm64}/unix-block.bpf
|
||||
*
|
||||
* Tries multiple paths for resilience:
|
||||
* 0. Explicit path provided via parameter (checked first if provided)
|
||||
* 1. vendor/seccomp/{arch}/unix-block.bpf (bundled - when bundled into consuming packages)
|
||||
* 2. ../../vendor/seccomp/{arch}/unix-block.bpf (package root - standard npm installs)
|
||||
* 3. ../vendor/seccomp/{arch}/unix-block.bpf (dist/vendor - for bundlers)
|
||||
* 4. Global npm install (if seccompBinaryPath not provided) - for native builds
|
||||
*
|
||||
* @param seccompBinaryPath - Optional explicit path to the BPF filter file. If provided and
|
||||
* exists, it will be used. If not provided, falls back to searching local paths and then
|
||||
* global npm install (for native builds where vendor directory isn't bundled).
|
||||
*/
|
||||
export function getPreGeneratedBpfPath(seccompBinaryPath) {
|
||||
const cacheKey = seccompBinaryPath ?? '';
|
||||
if (bpfPathCache.has(cacheKey)) {
|
||||
return bpfPathCache.get(cacheKey);
|
||||
}
|
||||
const result = findBpfPath(seccompBinaryPath);
|
||||
bpfPathCache.set(cacheKey, result);
|
||||
return result;
|
||||
}
|
||||
// NOTE: This is a slow operation (synchronous fs lookups + execSync). Ensure calls
|
||||
// are memoized at the top level rather than invoked repeatedly.
|
||||
function findBpfPath(seccompBinaryPath) {
|
||||
// Check explicit path first (highest priority)
|
||||
if (seccompBinaryPath) {
|
||||
if (fs.existsSync(seccompBinaryPath)) {
|
||||
logForDebugging(`[SeccompFilter] Using BPF filter from explicit path: ${seccompBinaryPath}`);
|
||||
return seccompBinaryPath;
|
||||
}
|
||||
logForDebugging(`[SeccompFilter] Explicit path provided but file not found: ${seccompBinaryPath}`);
|
||||
}
|
||||
const arch = getVendorArchitecture();
|
||||
if (!arch) {
|
||||
logForDebugging(`[SeccompFilter] Cannot find pre-generated BPF filter: unsupported architecture ${process.arch}`);
|
||||
return null;
|
||||
}
|
||||
logForDebugging(`[SeccompFilter] Detected architecture: ${arch}`);
|
||||
// Check local paths first (bundled or package install)
|
||||
for (const bpfPath of getLocalSeccompPaths('unix-block.bpf')) {
|
||||
if (fs.existsSync(bpfPath)) {
|
||||
logForDebugging(`[SeccompFilter] Found pre-generated BPF filter: ${bpfPath} (${arch})`);
|
||||
return bpfPath;
|
||||
}
|
||||
}
|
||||
// Fallback: check global npm install (for native builds without bundled vendor)
|
||||
for (const globalBase of getGlobalNpmPaths()) {
|
||||
const bpfPath = join(globalBase, 'vendor', 'seccomp', arch, 'unix-block.bpf');
|
||||
if (fs.existsSync(bpfPath)) {
|
||||
logForDebugging(`[SeccompFilter] Found pre-generated BPF filter in global install: ${bpfPath} (${arch})`);
|
||||
return bpfPath;
|
||||
}
|
||||
}
|
||||
logForDebugging(`[SeccompFilter] Pre-generated BPF filter not found in any expected location (${arch})`);
|
||||
return null;
|
||||
}
|
||||
/**
|
||||
* Get the path to the apply-seccomp binary from the vendor directory
|
||||
* Returns the path if it exists, null otherwise
|
||||
*
|
||||
* Pre-built apply-seccomp binaries are organized by architecture:
|
||||
* - vendor/seccomp/{x64,arm64}/apply-seccomp
|
||||
*
|
||||
* Tries multiple paths for resilience:
|
||||
* 0. Explicit path provided via parameter (checked first if provided)
|
||||
* 1. vendor/seccomp/{arch}/apply-seccomp (bundled - when bundled into consuming packages)
|
||||
* 2. ../../vendor/seccomp/{arch}/apply-seccomp (package root - standard npm installs)
|
||||
* 3. ../vendor/seccomp/{arch}/apply-seccomp (dist/vendor - for bundlers)
|
||||
* 4. Global npm install (if seccompBinaryPath not provided) - for native builds
|
||||
*
|
||||
* @param seccompBinaryPath - Optional explicit path to the apply-seccomp binary. If provided
|
||||
* and exists, it will be used. If not provided, falls back to searching local paths and
|
||||
* then global npm install (for native builds where vendor directory isn't bundled).
|
||||
*/
|
||||
export function getApplySeccompBinaryPath(seccompBinaryPath) {
|
||||
const cacheKey = seccompBinaryPath ?? '';
|
||||
if (applySeccompPathCache.has(cacheKey)) {
|
||||
return applySeccompPathCache.get(cacheKey);
|
||||
}
|
||||
const result = findApplySeccompPath(seccompBinaryPath);
|
||||
applySeccompPathCache.set(cacheKey, result);
|
||||
return result;
|
||||
}
|
||||
function findApplySeccompPath(seccompBinaryPath) {
|
||||
// Check explicit path first (highest priority)
|
||||
if (seccompBinaryPath) {
|
||||
if (fs.existsSync(seccompBinaryPath)) {
|
||||
logForDebugging(`[SeccompFilter] Using apply-seccomp binary from explicit path: ${seccompBinaryPath}`);
|
||||
return seccompBinaryPath;
|
||||
}
|
||||
logForDebugging(`[SeccompFilter] Explicit path provided but file not found: ${seccompBinaryPath}`);
|
||||
}
|
||||
const arch = getVendorArchitecture();
|
||||
if (!arch) {
|
||||
logForDebugging(`[SeccompFilter] Cannot find apply-seccomp binary: unsupported architecture ${process.arch}`);
|
||||
return null;
|
||||
}
|
||||
logForDebugging(`[SeccompFilter] Looking for apply-seccomp binary for architecture: ${arch}`);
|
||||
// Check local paths first (bundled or package install)
|
||||
for (const binaryPath of getLocalSeccompPaths('apply-seccomp')) {
|
||||
if (fs.existsSync(binaryPath)) {
|
||||
logForDebugging(`[SeccompFilter] Found apply-seccomp binary: ${binaryPath} (${arch})`);
|
||||
return binaryPath;
|
||||
}
|
||||
}
|
||||
// Fallback: check global npm install (for native builds without bundled vendor)
|
||||
for (const globalBase of getGlobalNpmPaths()) {
|
||||
const binaryPath = join(globalBase, 'vendor', 'seccomp', arch, 'apply-seccomp');
|
||||
if (fs.existsSync(binaryPath)) {
|
||||
logForDebugging(`[SeccompFilter] Found apply-seccomp binary in global install: ${binaryPath} (${arch})`);
|
||||
return binaryPath;
|
||||
}
|
||||
}
|
||||
logForDebugging(`[SeccompFilter] apply-seccomp binary not found in any expected location (${arch})`);
|
||||
return null;
|
||||
}
|
||||
/**
|
||||
* Get the path to a pre-generated seccomp BPF filter that blocks Unix domain socket creation
|
||||
* Returns the path to the BPF filter file, or null if not available
|
||||
*
|
||||
* The filter blocks socket(AF_UNIX, ...) syscalls while allowing all other syscalls.
|
||||
* This prevents creation of new Unix domain socket file descriptors.
|
||||
*
|
||||
* Security scope:
|
||||
* - Blocks: socket(AF_UNIX, ...) syscall (creating new Unix socket FDs)
|
||||
* - Does NOT block: Operations on inherited Unix socket FDs (bind, connect, sendto, etc.)
|
||||
* - Does NOT block: Unix socket FDs passed via SCM_RIGHTS
|
||||
* - For most sandboxing scenarios, blocking socket creation is sufficient
|
||||
*
|
||||
* Note: This blocks ALL Unix socket creation, regardless of path. The allowUnixSockets
|
||||
* configuration is not supported on Linux due to seccomp-bpf limitations (it cannot
|
||||
* read user-space memory to inspect socket paths).
|
||||
*
|
||||
* Requirements:
|
||||
* - Pre-generated BPF filters included for x64 and ARM64 only
|
||||
* - Other architectures are not supported
|
||||
*
|
||||
* @param seccompBinaryPath - Optional explicit path to the BPF filter file
|
||||
* @returns Path to the pre-generated BPF filter file, or null if not available
|
||||
*/
|
||||
export function generateSeccompFilter(seccompBinaryPath) {
|
||||
const preGeneratedBpf = getPreGeneratedBpfPath(seccompBinaryPath);
|
||||
if (preGeneratedBpf) {
|
||||
logForDebugging('[SeccompFilter] Using pre-generated BPF filter');
|
||||
return preGeneratedBpf;
|
||||
}
|
||||
logForDebugging('[SeccompFilter] Pre-generated BPF filter not available for this architecture. ' +
|
||||
'Only x64 and arm64 are supported.', { level: 'error' });
|
||||
return null;
|
||||
}
|
||||
/**
|
||||
* Clean up a seccomp filter file
|
||||
* Since we only use pre-generated BPF files from vendor/, this is a no-op.
|
||||
* Pre-generated files are never deleted.
|
||||
* Kept for backward compatibility with existing code that calls it.
|
||||
*/
|
||||
export function cleanupSeccompFilter(_filterPath) {
|
||||
// No-op: pre-generated BPF files are never cleaned up
|
||||
}
|
||||
//# sourceMappingURL=generate-seccomp-filter.js.map
|
||||
217
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/sandbox/http-proxy.js
generated
vendored
217
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/sandbox/http-proxy.js
generated
vendored
@@ -1,217 +0,0 @@
|
||||
import { Agent, createServer } from 'node:http';
|
||||
import { request as httpRequest } from 'node:http';
|
||||
import { request as httpsRequest } from 'node:https';
|
||||
import { connect } from 'node:net';
|
||||
import { URL } from 'node:url';
|
||||
import { logForDebugging } from '../utils/debug.js';
|
||||
export function createHttpProxyServer(options) {
|
||||
const server = createServer();
|
||||
// Handle CONNECT requests for HTTPS traffic
|
||||
server.on('connect', async (req, socket) => {
|
||||
// Attach error handler immediately to prevent unhandled errors
|
||||
socket.on('error', err => {
|
||||
logForDebugging(`Client socket error: ${err.message}`, { level: 'error' });
|
||||
});
|
||||
try {
|
||||
const [hostname, portStr] = req.url.split(':');
|
||||
const port = portStr === undefined ? undefined : parseInt(portStr, 10);
|
||||
if (!hostname || !port) {
|
||||
logForDebugging(`Invalid CONNECT request: ${req.url}`, {
|
||||
level: 'error',
|
||||
});
|
||||
socket.end('HTTP/1.1 400 Bad Request\r\n\r\n');
|
||||
return;
|
||||
}
|
||||
const allowed = await options.filter(port, hostname, socket);
|
||||
if (!allowed) {
|
||||
logForDebugging(`Connection blocked to ${hostname}:${port}`, {
|
||||
level: 'error',
|
||||
});
|
||||
socket.end('HTTP/1.1 403 Forbidden\r\n' +
|
||||
'Content-Type: text/plain\r\n' +
|
||||
'X-Proxy-Error: blocked-by-allowlist\r\n' +
|
||||
'\r\n' +
|
||||
'Connection blocked by network allowlist');
|
||||
return;
|
||||
}
|
||||
// Check if this host should be routed through a MITM proxy
|
||||
const mitmSocketPath = options.getMitmSocketPath?.(hostname);
|
||||
if (mitmSocketPath) {
|
||||
// Route through MITM proxy via Unix socket
|
||||
logForDebugging(`Routing CONNECT ${hostname}:${port} through MITM proxy at ${mitmSocketPath}`);
|
||||
const mitmSocket = connect({ path: mitmSocketPath }, () => {
|
||||
// Send CONNECT request to the MITM proxy
|
||||
mitmSocket.write(`CONNECT ${hostname}:${port} HTTP/1.1\r\n` +
|
||||
`Host: ${hostname}:${port}\r\n` +
|
||||
'\r\n');
|
||||
});
|
||||
// Buffer to accumulate the MITM proxy's response
|
||||
let responseBuffer = '';
|
||||
const onMitmData = (chunk) => {
|
||||
responseBuffer += chunk.toString();
|
||||
// Check if we've received the full HTTP response headers
|
||||
const headerEndIndex = responseBuffer.indexOf('\r\n\r\n');
|
||||
if (headerEndIndex !== -1) {
|
||||
// Remove data listener, we're done parsing the response
|
||||
mitmSocket.removeListener('data', onMitmData);
|
||||
// Check if MITM proxy accepted the connection
|
||||
const statusLine = responseBuffer.substring(0, responseBuffer.indexOf('\r\n'));
|
||||
if (statusLine.includes(' 200 ')) {
|
||||
// Connection established, now pipe data between client and MITM
|
||||
socket.write('HTTP/1.1 200 Connection Established\r\n\r\n');
|
||||
// If there's any data after the headers, write it to the client
|
||||
const remainingData = responseBuffer.substring(headerEndIndex + 4);
|
||||
if (remainingData.length > 0) {
|
||||
socket.write(remainingData);
|
||||
}
|
||||
mitmSocket.pipe(socket);
|
||||
socket.pipe(mitmSocket);
|
||||
}
|
||||
else {
|
||||
logForDebugging(`MITM proxy rejected CONNECT: ${statusLine}`, {
|
||||
level: 'error',
|
||||
});
|
||||
socket.end('HTTP/1.1 502 Bad Gateway\r\n\r\n');
|
||||
mitmSocket.destroy();
|
||||
}
|
||||
}
|
||||
};
|
||||
mitmSocket.on('data', onMitmData);
|
||||
mitmSocket.on('error', err => {
|
||||
logForDebugging(`MITM proxy connection failed: ${err.message}`, {
|
||||
level: 'error',
|
||||
});
|
||||
socket.end('HTTP/1.1 502 Bad Gateway\r\n\r\n');
|
||||
});
|
||||
socket.on('error', err => {
|
||||
logForDebugging(`Client socket error: ${err.message}`, {
|
||||
level: 'error',
|
||||
});
|
||||
mitmSocket.destroy();
|
||||
});
|
||||
socket.on('end', () => mitmSocket.end());
|
||||
mitmSocket.on('end', () => socket.end());
|
||||
}
|
||||
else {
|
||||
// Direct connection (original behavior)
|
||||
const serverSocket = connect(port, hostname, () => {
|
||||
socket.write('HTTP/1.1 200 Connection Established\r\n\r\n');
|
||||
serverSocket.pipe(socket);
|
||||
socket.pipe(serverSocket);
|
||||
});
|
||||
serverSocket.on('error', err => {
|
||||
logForDebugging(`CONNECT tunnel failed: ${err.message}`, {
|
||||
level: 'error',
|
||||
});
|
||||
socket.end('HTTP/1.1 502 Bad Gateway\r\n\r\n');
|
||||
});
|
||||
socket.on('error', err => {
|
||||
logForDebugging(`Client socket error: ${err.message}`, {
|
||||
level: 'error',
|
||||
});
|
||||
serverSocket.destroy();
|
||||
});
|
||||
socket.on('end', () => serverSocket.end());
|
||||
serverSocket.on('end', () => socket.end());
|
||||
}
|
||||
}
|
||||
catch (err) {
|
||||
logForDebugging(`Error handling CONNECT: ${err}`, { level: 'error' });
|
||||
socket.end('HTTP/1.1 500 Internal Server Error\r\n\r\n');
|
||||
}
|
||||
});
|
||||
// Handle regular HTTP requests
|
||||
// Handle plain HTTP proxy requests. Proxy-style requests carry an absolute
// URL in req.url, which is parsed to determine the destination host/port.
// Each request is checked against the allowlist filter, then either routed
// through a MITM proxy (via Unix socket) or forwarded directly.
server.on('request', async (req, res) => {
    try {
        const url = new URL(req.url);
        const hostname = url.hostname;
        // Explicit port wins; otherwise infer the scheme default (443/80).
        const port = url.port
            ? parseInt(url.port, 10)
            : url.protocol === 'https:'
                ? 443
                : 80;
        // Ask the caller-supplied filter whether this destination is allowed.
        const allowed = await options.filter(port, hostname, req.socket);
        if (!allowed) {
            logForDebugging(`HTTP request blocked to ${hostname}:${port}`, {
                level: 'error',
            });
            // 403 with a marker header so clients can distinguish a policy
            // block from an upstream failure.
            res.writeHead(403, {
                'Content-Type': 'text/plain',
                'X-Proxy-Error': 'blocked-by-allowlist',
            });
            res.end('Connection blocked by network allowlist');
            return;
        }
        // Check if this host should be routed through a MITM proxy
        const mitmSocketPath = options.getMitmSocketPath?.(hostname);
        if (mitmSocketPath) {
            // Route through MITM proxy via Unix socket
            // Use an agent that connects via the Unix socket
            logForDebugging(`Routing HTTP ${req.method} ${hostname}:${port} through MITM proxy at ${mitmSocketPath}`);
            const mitmAgent = new Agent({
                // @ts-expect-error - socketPath is valid but not in types
                socketPath: mitmSocketPath,
            });
            // Send request to MITM proxy with full URL (proxy-style request)
            const proxyReq = httpRequest({
                agent: mitmAgent,
                // For proxy requests, path should be the full URL
                path: req.url,
                method: req.method,
                headers: {
                    ...req.headers,
                    host: url.host,
                },
            }, proxyRes => {
                // Relay upstream status/headers, then stream the body through.
                res.writeHead(proxyRes.statusCode, proxyRes.headers);
                proxyRes.pipe(res);
            });
            proxyReq.on('error', err => {
                logForDebugging(`MITM proxy request failed: ${err.message}`, {
                    level: 'error',
                });
                // Only write a 502 if the response hasn't started streaming yet.
                if (!res.headersSent) {
                    res.writeHead(502, { 'Content-Type': 'text/plain' });
                    res.end('Bad Gateway');
                }
            });
            // Stream the client's request body to the MITM proxy.
            req.pipe(proxyReq);
        }
        else {
            // Direct request (original behavior)
            // Choose http or https module
            const requestFn = url.protocol === 'https:' ? httpsRequest : httpRequest;
            const proxyReq = requestFn({
                hostname,
                port,
                // Origin-form path (no scheme/host) for a direct request.
                path: url.pathname + url.search,
                method: req.method,
                headers: {
                    ...req.headers,
                    host: url.host,
                },
            }, proxyRes => {
                res.writeHead(proxyRes.statusCode, proxyRes.headers);
                proxyRes.pipe(res);
            });
            proxyReq.on('error', err => {
                logForDebugging(`Proxy request failed: ${err.message}`, {
                    level: 'error',
                });
                if (!res.headersSent) {
                    res.writeHead(502, { 'Content-Type': 'text/plain' });
                    res.end('Bad Gateway');
                }
            });
            req.pipe(proxyReq);
        }
    }
    catch (err) {
        // Catch-all (e.g. malformed req.url in new URL()) — report a 500.
        logForDebugging(`Error handling HTTP request: ${err}`, { level: 'error' });
        res.writeHead(500, { 'Content-Type': 'text/plain' });
        res.end('Internal Server Error');
    }
});
|
||||
return server;
|
||||
}
|
||||
//# sourceMappingURL=http-proxy.js.map
|
||||
@@ -1,875 +0,0 @@
|
||||
import shellquote from 'shell-quote';
|
||||
import { logForDebugging } from '../utils/debug.js';
|
||||
import { whichSync } from '../utils/which.js';
|
||||
import { randomBytes } from 'node:crypto';
|
||||
import * as fs from 'fs';
|
||||
import { spawn } from 'node:child_process';
|
||||
import { tmpdir } from 'node:os';
|
||||
import path, { join } from 'node:path';
|
||||
import { ripGrep } from '../utils/ripgrep.js';
|
||||
import { generateProxyEnvVars, normalizePathForSandbox, normalizeCaseForComparison, isSymlinkOutsideBoundary, DANGEROUS_FILES, getDangerousDirectories, } from './sandbox-utils.js';
|
||||
import { generateSeccompFilter, cleanupSeccompFilter, getPreGeneratedBpfPath, getApplySeccompBinaryPath, } from './generate-seccomp-filter.js';
|
||||
/**
 * Default max depth for searching dangerous files below the working
 * directory (used by linuxGetMandatoryDenyPaths / generateFilesystemArgs).
 * Depth is limited because deeply nested dangerous files are rare and the
 * traversal cost of a deeper scan outweighs the security benefit.
 */
const DEFAULT_MANDATORY_DENY_SEARCH_DEPTH = 3;
|
||||
/**
 * Walk every component of `targetPath` from the root and return the first
 * component that is a symlink located inside one of `allowedWritePaths`.
 * Returns null when no such symlink exists (or the path stops existing).
 *
 * Used to detect and block symlink replacement attacks, where an attacker
 * could delete a symlink and create a real directory with malicious content.
 *
 * @param {string} targetPath absolute path whose components are inspected
 * @param {string[]} allowedWritePaths normalized writable roots
 * @returns {string | null} the offending symlink component, or null
 */
function findSymlinkInPath(targetPath, allowedWritePaths) {
    const segments = targetPath.split(path.sep).filter(Boolean);
    let walked = '';
    for (const segment of segments) {
        const candidate = walked + path.sep + segment;
        let info;
        try {
            info = fs.lstatSync(candidate);
        }
        catch {
            // Component does not exist (or is unreadable) — nothing deeper
            // can be a symlink we care about.
            return null;
        }
        if (info.isSymbolicLink()) {
            const inAllowedRoot = allowedWritePaths.some(root => candidate === root || candidate.startsWith(root + '/'));
            if (inAllowedRoot) {
                return candidate;
            }
        }
        walked = candidate;
    }
    return null;
}
|
||||
/**
 * Check if any existing component in the path is a regular file (not a
 * directory). If so, the target path can never be created, because you
 * can't mkdir under a file.
 *
 * This handles the git worktree case: .git is a file, so .git/hooks can never
 * exist and there's nothing to deny.
 *
 * Fix: the previous `stat.isFile() || stat.isSymbolicLink()` check contained
 * dead code — fs.statSync() follows symlinks, so the returned Stats can never
 * report isSymbolicLink(). A symlink to a file already reports isFile() here,
 * and a broken symlink makes statSync throw (handled by the catch). The dead
 * condition is removed; behavior is unchanged.
 *
 * @param {string} targetPath absolute path to inspect component-by-component
 * @returns {boolean} true if some existing ancestor component is a file
 */
function hasFileAncestor(targetPath) {
    const parts = targetPath.split(path.sep);
    let currentPath = '';
    for (const part of parts) {
        if (!part)
            continue; // Skip empty parts (leading /)
        const nextPath = currentPath + path.sep + part;
        try {
            // statSync follows symlinks; a broken symlink throws and is
            // treated as non-existent below.
            if (fs.statSync(nextPath).isFile()) {
                // This component exists as a file — nothing below it can be created
                return true;
            }
        }
        catch {
            // Path doesn't exist — stop checking
            break;
        }
        currentPath = nextPath;
    }
    return false;
}
|
||||
/**
 * Find the first non-existent path component.
 * E.g., for "/existing/parent/nonexistent/child/file.txt" where
 * /existing/parent exists, returns "/existing/parent/nonexistent".
 *
 * This is used to block creation of non-existent deny paths by mounting
 * /dev/null at the first missing component, preventing mkdir from creating
 * the parent directories.
 *
 * @param {string} targetPath absolute path to examine
 * @returns {string} the first missing component, or `targetPath` itself
 *   when every component already exists
 */
function findFirstNonExistentComponent(targetPath) {
    let prefix = '';
    for (const piece of targetPath.split(path.sep).filter(Boolean)) {
        prefix = prefix + path.sep + piece;
        if (!fs.existsSync(prefix)) {
            return prefix;
        }
    }
    // Shouldn't reach here if called correctly (caller verified the path
    // does not fully exist).
    return targetPath;
}
|
||||
/**
 * Get mandatory deny paths using ripgrep (Linux only).
 * Uses a SINGLE ripgrep call with multiple glob patterns for efficiency.
 * With --max-depth limiting, this is fast enough to run on each command without memoization.
 *
 * @param {{command: string}} ripgrepConfig how to invoke ripgrep
 * @param {number} maxDepth maximum directory depth scanned below cwd
 * @param {boolean} allowGitConfig when true, .git/config is NOT denied
 * @param {AbortSignal} [abortSignal] optional cancellation for the scan
 * @returns {Promise<string[]>} de-duplicated absolute paths to deny writes to
 */
async function linuxGetMandatoryDenyPaths(ripgrepConfig = { command: 'rg' }, maxDepth = DEFAULT_MANDATORY_DENY_SEARCH_DEPTH, allowGitConfig = false, abortSignal) {
    const cwd = process.cwd();
    // Use provided signal or create a fallback controller
    const fallbackController = new AbortController();
    const signal = abortSignal ?? fallbackController.signal;
    const dangerousDirectories = getDangerousDirectories();
    // Note: Settings files are added at the callsite in sandbox-manager.ts
    const denyPaths = [
        // Dangerous files in CWD
        ...DANGEROUS_FILES.map(f => path.resolve(cwd, f)),
        // Dangerous directories in CWD
        ...dangerousDirectories.map(d => path.resolve(cwd, d)),
    ];
    // Git hooks and config are only denied when .git exists as a directory.
    // In git worktrees, .git is a file (e.g., "gitdir: /path/..."), so
    // .git/hooks can never exist — denying it would cause bwrap to fail.
    // When .git doesn't exist at all, mounting at .git would block its
    // creation and break git init.
    const dotGitPath = path.resolve(cwd, '.git');
    let dotGitIsDirectory = false;
    try {
        dotGitIsDirectory = fs.statSync(dotGitPath).isDirectory();
    }
    catch {
        // .git doesn't exist
    }
    if (dotGitIsDirectory) {
        // Git hooks always blocked for security
        denyPaths.push(path.resolve(cwd, '.git/hooks'));
        // Git config conditionally blocked based on allowGitConfig setting
        if (!allowGitConfig) {
            denyPaths.push(path.resolve(cwd, '.git/config'));
        }
    }
    // Build iglob args for all patterns in one ripgrep call
    const iglobArgs = [];
    for (const fileName of DANGEROUS_FILES) {
        iglobArgs.push('--iglob', fileName);
    }
    for (const dirName of dangerousDirectories) {
        iglobArgs.push('--iglob', `**/${dirName}/**`);
    }
    // Git hooks always blocked in nested repos
    iglobArgs.push('--iglob', '**/.git/hooks/**');
    // Git config conditionally blocked in nested repos
    if (!allowGitConfig) {
        iglobArgs.push('--iglob', '**/.git/config');
    }
    // Single ripgrep call to find all dangerous paths in subdirectories
    // Limit depth for performance - deeply nested dangerous files are rare
    // and the security benefit doesn't justify the traversal cost
    let matches = [];
    try {
        matches = await ripGrep([
            '--files',
            '--hidden',
            '--max-depth',
            String(maxDepth),
            ...iglobArgs,
            '-g',
            '!**/node_modules/**',
        ], cwd, signal, ripgrepConfig);
    }
    catch (error) {
        // Best-effort: a failed scan means only the CWD-level denies apply.
        logForDebugging(`[Sandbox] ripgrep scan failed: ${error}`);
    }
    // Process matches: each match is a file path relative to cwd.
    for (const match of matches) {
        const absolutePath = path.resolve(cwd, match);
        // File inside a dangerous directory -> add the directory path
        let foundDir = false;
        for (const dirName of [...dangerousDirectories, '.git']) {
            // Case-insensitive segment comparison (matches the --iglob scan).
            const normalizedDirName = normalizeCaseForComparison(dirName);
            const segments = absolutePath.split(path.sep);
            const dirIndex = segments.findIndex(s => normalizeCaseForComparison(s) === normalizedDirName);
            if (dirIndex !== -1) {
                // For .git, we want hooks/ or config, not the whole .git dir
                if (dirName === '.git') {
                    const gitDir = segments.slice(0, dirIndex + 1).join(path.sep);
                    if (match.includes('.git/hooks')) {
                        denyPaths.push(path.join(gitDir, 'hooks'));
                    }
                    else if (match.includes('.git/config')) {
                        denyPaths.push(path.join(gitDir, 'config'));
                    }
                }
                else {
                    // Deny the dangerous directory itself, not just the file.
                    denyPaths.push(segments.slice(0, dirIndex + 1).join(path.sep));
                }
                foundDir = true;
                break;
            }
        }
        // Dangerous file match
        if (!foundDir) {
            denyPaths.push(absolutePath);
        }
    }
    return [...new Set(denyPaths)];
}
|
||||
// Track generated seccomp filters for cleanup on process exit
const generatedSeccompFilters = new Set();
// Track mount points created by bwrap for non-existent deny paths.
// When bwrap does --ro-bind /dev/null /nonexistent/path, it creates an empty
// file on the host as a mount point. These persist after bwrap exits and must
// be cleaned up explicitly.
const bwrapMountPoints = new Set();
// Whether the process 'exit' cleanup handler has been installed; guards
// registerExitCleanupHandler() so it only registers once.
let exitHandlerRegistered = false;
|
||||
/**
 * Install a one-time process 'exit' handler that removes any generated
 * seccomp filter files and bwrap mount points. Safe to call repeatedly;
 * only the first call registers the handler.
 */
function registerExitCleanupHandler() {
    if (exitHandlerRegistered) {
        return;
    }
    exitHandlerRegistered = true;
    process.on('exit', () => {
        for (const filterPath of generatedSeccompFilters) {
            try {
                cleanupSeccompFilter(filterPath);
            }
            catch {
                // Ignore cleanup errors during exit
            }
        }
        cleanupBwrapMountPoints();
    });
}
|
||||
/**
 * Clean up mount point files created by bwrap for non-existent deny paths.
 *
 * When protecting non-existent deny paths, bwrap creates empty files on the
 * host filesystem as mount points for --ro-bind. These files persist after
 * bwrap exits. This function removes them.
 *
 * This should be called after each sandboxed command completes to prevent
 * ghost dotfiles (e.g. .bashrc, .gitconfig) from appearing in the working
 * directory. It is also called automatically on process exit as a safety net.
 *
 * Safe to call at any time — it only removes entries tracked during
 * generateFilesystemArgs() and skips any that no longer exist or now hold
 * real content.
 */
export function cleanupBwrapMountPoints() {
    for (const mountPoint of bwrapMountPoints) {
        try {
            const info = fs.statSync(mountPoint);
            // Only remove what still looks like the artifact bwrap created:
            // an empty file, or an empty directory (intermediate components).
            // Anything with real content is left alone.
            if (info.isFile() && info.size === 0) {
                fs.unlinkSync(mountPoint);
                logForDebugging(`[Sandbox Linux] Cleaned up bwrap mount point (file): ${mountPoint}`);
            }
            else if (info.isDirectory() && fs.readdirSync(mountPoint).length === 0) {
                fs.rmdirSync(mountPoint);
                logForDebugging(`[Sandbox Linux] Cleaned up bwrap mount point (dir): ${mountPoint}`);
            }
        }
        catch {
            // Ignore cleanup errors — the file may have already been removed
        }
    }
    bwrapMountPoints.clear();
}
|
||||
/**
 * Get detailed status of Linux sandbox dependencies.
 *
 * @param {{bpfPath?: string, applyPath?: string}} [seccompConfig] optional
 *   overrides for the seccomp BPF filter and apply-seccomp binary locations
 * @returns {{hasBwrap: boolean, hasSocat: boolean, hasSeccompBpf: boolean,
 *   hasSeccompApply: boolean}} availability of each dependency
 */
export function getLinuxDependencyStatus(seccompConfig) {
    const hasBwrap = whichSync('bwrap') !== null;
    const hasSocat = whichSync('socat') !== null;
    const hasSeccompBpf = getPreGeneratedBpfPath(seccompConfig?.bpfPath) !== null;
    const hasSeccompApply = getApplySeccompBinaryPath(seccompConfig?.applyPath) !== null;
    return { hasBwrap, hasSocat, hasSeccompBpf, hasSeccompApply };
}
|
||||
/**
 * Check sandbox dependencies and return a structured result.
 * Missing bwrap/socat are hard errors; missing seccomp pieces only degrade
 * protection and are reported as a warning.
 *
 * @param {{bpfPath?: string, applyPath?: string}} [seccompConfig] optional
 *   overrides for the seccomp artifact locations
 * @returns {{warnings: string[], errors: string[]}}
 */
export function checkLinuxDependencies(seccompConfig) {
    const errors = [];
    const warnings = [];
    if (whichSync('bwrap') === null) {
        errors.push('bubblewrap (bwrap) not installed');
    }
    if (whichSync('socat') === null) {
        errors.push('socat not installed');
    }
    const hasBpf = getPreGeneratedBpfPath(seccompConfig?.bpfPath) !== null;
    const hasApply = getApplySeccompBinaryPath(seccompConfig?.applyPath) !== null;
    if (!(hasBpf && hasApply)) {
        warnings.push('seccomp not available - unix socket access not restricted');
    }
    return { warnings, errors };
}
|
||||
/**
 * Initialize the Linux network bridge for sandbox networking
 *
 * ARCHITECTURE NOTE:
 * Linux network sandboxing uses bwrap --unshare-net which creates a completely isolated
 * network namespace with NO network access. To enable network access, we:
 *
 * 1. Host side: Run socat bridges that listen on Unix sockets and forward to host proxy servers
 *    - HTTP bridge: Unix socket -> host HTTP proxy (for HTTP/HTTPS traffic)
 *    - SOCKS bridge: Unix socket -> host SOCKS5 proxy (for SSH/git traffic)
 *
 * 2. Sandbox side: Bind the Unix sockets into the isolated namespace and run socat listeners
 *    - HTTP listener on port 3128 -> HTTP Unix socket -> host HTTP proxy
 *    - SOCKS listener on port 1080 -> SOCKS Unix socket -> host SOCKS5 proxy
 *
 * 3. Configure environment:
 *    - HTTP_PROXY=http://localhost:3128 for HTTP/HTTPS tools
 *    - GIT_SSH_COMMAND with socat for SSH through SOCKS5
 *
 * LIMITATION: Unlike macOS sandbox which can enforce domain-based allowlists at the kernel level,
 * Linux's --unshare-net provides only all-or-nothing network isolation. Domain filtering happens
 * at the host proxy level, not the sandbox boundary. This means network restrictions on Linux
 * depend on the proxy's filtering capabilities.
 *
 * DEPENDENCIES: Requires bwrap (bubblewrap) and socat
 *
 * @param {number} httpProxyPort localhost port of the host HTTP proxy
 * @param {number} socksProxyPort localhost port of the host SOCKS5 proxy
 * @returns {Promise<object>} socket paths, bridge child processes, and the
 *   original proxy ports
 * @throws {Error} if a bridge process fails to start or the Unix sockets
 *   never appear
 */
export async function initializeLinuxNetworkBridge(httpProxyPort, socksProxyPort) {
    // Random suffix keeps concurrent sessions from colliding on socket paths.
    const socketId = randomBytes(8).toString('hex');
    const httpSocketPath = join(tmpdir(), `claude-http-${socketId}.sock`);
    const socksSocketPath = join(tmpdir(), `claude-socks-${socketId}.sock`);
    // Start HTTP bridge: Unix socket listener forwarding to the host proxy.
    // keepalive options detect dead TCP peers so forked handlers don't leak.
    const httpSocatArgs = [
        `UNIX-LISTEN:${httpSocketPath},fork,reuseaddr`,
        `TCP:localhost:${httpProxyPort},keepalive,keepidle=10,keepintvl=5,keepcnt=3`,
    ];
    logForDebugging(`Starting HTTP bridge: socat ${httpSocatArgs.join(' ')}`);
    const httpBridgeProcess = spawn('socat', httpSocatArgs, {
        stdio: 'ignore',
    });
    if (!httpBridgeProcess.pid) {
        throw new Error('Failed to start HTTP bridge process');
    }
    // Add error and exit handlers to monitor bridge health
    httpBridgeProcess.on('error', err => {
        logForDebugging(`HTTP bridge process error: ${err}`, { level: 'error' });
    });
    httpBridgeProcess.on('exit', (code, signal) => {
        logForDebugging(`HTTP bridge process exited with code ${code}, signal ${signal}`, { level: code === 0 ? 'info' : 'error' });
    });
    // Start SOCKS bridge (same shape as the HTTP bridge, different port)
    const socksSocatArgs = [
        `UNIX-LISTEN:${socksSocketPath},fork,reuseaddr`,
        `TCP:localhost:${socksProxyPort},keepalive,keepidle=10,keepintvl=5,keepcnt=3`,
    ];
    logForDebugging(`Starting SOCKS bridge: socat ${socksSocatArgs.join(' ')}`);
    const socksBridgeProcess = spawn('socat', socksSocatArgs, {
        stdio: 'ignore',
    });
    if (!socksBridgeProcess.pid) {
        // Clean up HTTP bridge so we don't leak the first child on failure
        if (httpBridgeProcess.pid) {
            try {
                process.kill(httpBridgeProcess.pid, 'SIGTERM');
            }
            catch {
                // Ignore errors
            }
        }
        throw new Error('Failed to start SOCKS bridge process');
    }
    // Add error and exit handlers to monitor bridge health
    socksBridgeProcess.on('error', err => {
        logForDebugging(`SOCKS bridge process error: ${err}`, { level: 'error' });
    });
    socksBridgeProcess.on('exit', (code, signal) => {
        logForDebugging(`SOCKS bridge process exited with code ${code}, signal ${signal}`, { level: code === 0 ? 'info' : 'error' });
    });
    // Wait for both sockets to be ready, polling with a growing backoff
    // (i * 100ms; first retry is immediate).
    const maxAttempts = 5;
    for (let i = 0; i < maxAttempts; i++) {
        if (!httpBridgeProcess.pid ||
            httpBridgeProcess.killed ||
            !socksBridgeProcess.pid ||
            socksBridgeProcess.killed) {
            throw new Error('Linux bridge process died unexpectedly');
        }
        try {
            // fs already imported
            if (fs.existsSync(httpSocketPath) && fs.existsSync(socksSocketPath)) {
                logForDebugging(`Linux bridges ready after ${i + 1} attempts`);
                break;
            }
        }
        catch (err) {
            logForDebugging(`Error checking sockets (attempt ${i + 1}): ${err}`, {
                level: 'error',
            });
        }
        if (i === maxAttempts - 1) {
            // Clean up both processes before giving up
            if (httpBridgeProcess.pid) {
                try {
                    process.kill(httpBridgeProcess.pid, 'SIGTERM');
                }
                catch {
                    // Ignore errors
                }
            }
            if (socksBridgeProcess.pid) {
                try {
                    process.kill(socksBridgeProcess.pid, 'SIGTERM');
                }
                catch {
                    // Ignore errors
                }
            }
            throw new Error(`Failed to create bridge sockets after ${maxAttempts} attempts`);
        }
        await new Promise(resolve => setTimeout(resolve, i * 100));
    }
    return {
        httpSocketPath,
        socksSocketPath,
        httpBridgeProcess,
        socksBridgeProcess,
        httpProxyPort,
        socksProxyPort,
    };
}
|
||||
/**
 * Build the command that runs inside the sandbox.
 * Sets up HTTP proxy on port 3128 and SOCKS proxy on port 1080, then runs
 * the user command — via apply-seccomp when a filter path is given, or via
 * a plain `eval` otherwise.
 *
 * @param {string} httpSocketPath Unix socket bound into the sandbox for HTTP
 * @param {string} socksSocketPath Unix socket bound into the sandbox for SOCKS
 * @param {string} userCommand the shell command to execute
 * @param {string} [seccompFilterPath] BPF filter applied via apply-seccomp
 * @param {string} [shell] shell binary to use (defaults to bash)
 * @param {string} [applySeccompPath] override for the apply-seccomp binary
 * @returns {string} a single shell command line
 * @throws {Error} when a seccomp filter is requested but apply-seccomp is missing
 */
function buildSandboxCommand(httpSocketPath, socksSocketPath, userCommand, seccompFilterPath, shell, applySeccompPath) {
    // Default to bash for backward compatibility
    const shellPath = shell || 'bash';
    // Start both in-sandbox socat listeners in the background, and kill them
    // when the wrapper shell exits.
    const prelude = [
        `socat TCP-LISTEN:3128,fork,reuseaddr UNIX-CONNECT:${httpSocketPath} >/dev/null 2>&1 &`,
        `socat TCP-LISTEN:1080,fork,reuseaddr UNIX-CONNECT:${socksSocketPath} >/dev/null 2>&1 &`,
        'trap "kill %1 %2 2>/dev/null; exit" EXIT',
    ];
    let finalCommand;
    if (seccompFilterPath) {
        // apply-seccomp approach:
        // 1. Outer bwrap/bash: starts socat processes (can use Unix sockets)
        // 2. apply-seccomp: applies seccomp filter and execs user command
        // 3. User command runs with seccomp active (Unix sockets blocked)
        //
        // apply-seccomp is a simple C program that:
        // - Sets PR_SET_NO_NEW_PRIVS
        // - Applies the seccomp BPF filter via prctl(PR_SET_SECCOMP)
        // - Execs the user command
        //
        // This is simpler and more portable than nested bwrap, with no FD redirects needed.
        const applySeccompBinary = getApplySeccompBinaryPath(applySeccompPath);
        if (!applySeccompBinary) {
            throw new Error('apply-seccomp binary not found. This should have been caught earlier. ' +
                'Ensure vendor/seccomp/{x64,arm64}/apply-seccomp binaries are included in the package.');
        }
        finalCommand = shellquote.quote([
            applySeccompBinary,
            seccompFilterPath,
            shellPath,
            '-c',
            userCommand,
        ]);
    }
    else {
        // No seccomp filter - run user command directly
        finalCommand = `eval ${shellquote.quote([userCommand])}`;
    }
    const innerScript = [...prelude, finalCommand].join('\n');
    return `${shellPath} -c ${shellquote.quote([innerScript])}`;
}
|
||||
/**
 * Generate filesystem bind mount arguments for bwrap.
 *
 * Builds the ordered --bind/--ro-bind/--tmpfs argument list implementing:
 * - write restrictions (read-only root + writable allow paths + deny paths),
 * - mandatory denies discovered by linuxGetMandatoryDenyPaths,
 * - read restrictions (tmpfs over denied dirs, /dev/null over denied files,
 *   with allowWithinDeny re-binds).
 * Argument ORDER matters to bwrap: later mounts layer over earlier ones.
 *
 * @param {{denyOnly?: string[], allowWithinDeny?: string[]}} [readConfig]
 * @param {{allowOnly?: string[], denyWithinAllow?: string[]}} [writeConfig]
 *   undefined means no write restrictions at all
 * @param {{command: string}} ripgrepConfig passed through to the deny scan
 * @param {number} mandatoryDenySearchDepth max depth for the deny scan
 * @param {boolean} allowGitConfig when true, .git/config is not denied
 * @param {AbortSignal} [abortSignal] cancellation for the deny scan
 * @returns {Promise<string[]>} bwrap CLI arguments
 */
async function generateFilesystemArgs(readConfig, writeConfig, ripgrepConfig = { command: 'rg' }, mandatoryDenySearchDepth = DEFAULT_MANDATORY_DENY_SEARCH_DEPTH, allowGitConfig = false, abortSignal) {
    const args = [];
    // fs already imported
    // Determine initial root mount based on write restrictions
    if (writeConfig) {
        // Write restrictions: Start with read-only root, then allow writes to specific paths
        args.push('--ro-bind', '/', '/');
        // Collect normalized allowed write paths for later checking
        const allowedWritePaths = [];
        // Allow writes to specific paths
        for (const pathPattern of writeConfig.allowOnly || []) {
            const normalizedPath = normalizePathForSandbox(pathPattern);
            logForDebugging(`[Sandbox Linux] Processing write path: ${pathPattern} -> ${normalizedPath}`);
            // Skip /dev/* paths since --dev /dev already handles them
            if (normalizedPath.startsWith('/dev/')) {
                logForDebugging(`[Sandbox Linux] Skipping /dev path: ${normalizedPath}`);
                continue;
            }
            if (!fs.existsSync(normalizedPath)) {
                logForDebugging(`[Sandbox Linux] Skipping non-existent write path: ${normalizedPath}`);
                continue;
            }
            // Check if path is a symlink pointing outside expected boundaries
            // bwrap follows symlinks, so --bind on a symlink makes the target writable
            // This could unexpectedly expose paths the user didn't intend to allow
            try {
                const resolvedPath = fs.realpathSync(normalizedPath);
                // Trim trailing slashes before comparing: realpathSync never returns
                // a trailing slash, but normalizedPath may have one, which would cause
                // a false mismatch and incorrectly treat the path as a symlink.
                const normalizedForComparison = normalizedPath.replace(/\/+$/, '');
                if (resolvedPath !== normalizedForComparison &&
                    isSymlinkOutsideBoundary(normalizedPath, resolvedPath)) {
                    logForDebugging(`[Sandbox Linux] Skipping symlink write path pointing outside expected location: ${pathPattern} -> ${resolvedPath}`);
                    continue;
                }
            }
            catch {
                // realpathSync failed - path might not exist or be accessible, skip it
                logForDebugging(`[Sandbox Linux] Skipping write path that could not be resolved: ${normalizedPath}`);
                continue;
            }
            args.push('--bind', normalizedPath, normalizedPath);
            allowedWritePaths.push(normalizedPath);
        }
        // Deny writes within allowed paths (user-specified + mandatory denies)
        const denyPaths = [
            ...(writeConfig.denyWithinAllow || []),
            ...(await linuxGetMandatoryDenyPaths(ripgrepConfig, mandatoryDenySearchDepth, allowGitConfig, abortSignal)),
        ];
        for (const pathPattern of denyPaths) {
            const normalizedPath = normalizePathForSandbox(pathPattern);
            // Skip /dev/* paths since --dev /dev already handles them
            if (normalizedPath.startsWith('/dev/')) {
                continue;
            }
            // Check for symlinks in the path - if any parent component is a symlink,
            // mount /dev/null there to prevent symlink replacement attacks.
            // Attack scenario: .claude is a symlink to ./decoy/, attacker deletes
            // symlink and creates real .claude/settings.json with malicious hooks.
            const symlinkInPath = findSymlinkInPath(normalizedPath, allowedWritePaths);
            if (symlinkInPath) {
                args.push('--ro-bind', '/dev/null', symlinkInPath);
                logForDebugging(`[Sandbox Linux] Mounted /dev/null at symlink ${symlinkInPath} to prevent symlink replacement attack`);
                continue;
            }
            // Handle non-existent paths by mounting /dev/null to block creation.
            // Without this, a sandboxed process could mkdir+write a denied path that
            // doesn't exist yet, bypassing the deny rule entirely.
            //
            // bwrap creates empty files on the host as mount points for these binds.
            // We track them in bwrapMountPoints so cleanupBwrapMountPoints() can
            // remove them after the command exits.
            if (!fs.existsSync(normalizedPath)) {
                // Fix 1 (worktree): If any existing component in the deny path is a
                // file (not a directory), skip the deny entirely. You can't mkdir
                // under a file, so the deny path can never be created. This handles
                // git worktrees where .git is a file.
                if (hasFileAncestor(normalizedPath)) {
                    logForDebugging(`[Sandbox Linux] Skipping deny path with file ancestor (cannot create paths under a file): ${normalizedPath}`);
                    continue;
                }
                // Find the deepest existing ancestor directory
                let ancestorPath = path.dirname(normalizedPath);
                while (ancestorPath !== '/' && !fs.existsSync(ancestorPath)) {
                    ancestorPath = path.dirname(ancestorPath);
                }
                // Only protect if the existing ancestor is within an allowed write path.
                // If not, the path is already read-only from --ro-bind / /.
                const ancestorIsWithinAllowedPath = allowedWritePaths.some(allowedPath => ancestorPath.startsWith(allowedPath + '/') ||
                    ancestorPath === allowedPath ||
                    normalizedPath.startsWith(allowedPath + '/'));
                if (ancestorIsWithinAllowedPath) {
                    const firstNonExistent = findFirstNonExistentComponent(normalizedPath);
                    // Fix 2: If firstNonExistent is an intermediate component (not the
                    // leaf deny path itself), mount a read-only empty directory instead
                    // of /dev/null. This prevents the component from appearing as a file
                    // which breaks tools that expect to traverse it as a directory.
                    if (firstNonExistent !== normalizedPath) {
                        const emptyDir = fs.mkdtempSync(path.join(tmpdir(), 'claude-empty-'));
                        args.push('--ro-bind', emptyDir, firstNonExistent);
                        bwrapMountPoints.add(firstNonExistent);
                        registerExitCleanupHandler();
                        logForDebugging(`[Sandbox Linux] Mounted empty dir at ${firstNonExistent} to block creation of ${normalizedPath}`);
                    }
                    else {
                        args.push('--ro-bind', '/dev/null', firstNonExistent);
                        bwrapMountPoints.add(firstNonExistent);
                        registerExitCleanupHandler();
                        logForDebugging(`[Sandbox Linux] Mounted /dev/null at ${firstNonExistent} to block creation of ${normalizedPath}`);
                    }
                }
                else {
                    logForDebugging(`[Sandbox Linux] Skipping non-existent deny path not within allowed paths: ${normalizedPath}`);
                }
                continue;
            }
            // Only add deny binding if this path is within an allowed write path
            // Otherwise it's already read-only from the initial --ro-bind / /
            const isWithinAllowedPath = allowedWritePaths.some(allowedPath => normalizedPath.startsWith(allowedPath + '/') ||
                normalizedPath === allowedPath);
            if (isWithinAllowedPath) {
                args.push('--ro-bind', normalizedPath, normalizedPath);
            }
            else {
                logForDebugging(`[Sandbox Linux] Skipping deny path not within allowed paths: ${normalizedPath}`);
            }
        }
    }
    else {
        // No write restrictions: Allow all writes
        args.push('--bind', '/', '/');
    }
    // Handle read restrictions by mounting tmpfs over denied paths
    const readDenyPaths = [...(readConfig?.denyOnly || [])];
    const readAllowPaths = (readConfig?.allowWithinDeny || []).map(p => normalizePathForSandbox(p));
    // Always hide /etc/ssh/ssh_config.d to avoid permission issues with OrbStack
    // SSH is very strict about config file permissions and ownership, and they can
    // appear wrong inside the sandbox causing "Bad owner or permissions" errors
    if (fs.existsSync('/etc/ssh/ssh_config.d')) {
        readDenyPaths.push('/etc/ssh/ssh_config.d');
    }
    for (const pathPattern of readDenyPaths) {
        const normalizedPath = normalizePathForSandbox(pathPattern);
        if (!fs.existsSync(normalizedPath)) {
            logForDebugging(`[Sandbox Linux] Skipping non-existent read deny path: ${normalizedPath}`);
            continue;
        }
        const readDenyStat = fs.statSync(normalizedPath);
        if (readDenyStat.isDirectory()) {
            args.push('--tmpfs', normalizedPath);
            // Re-allow specific paths within the denied directory (allowRead overrides denyRead).
            // After mounting tmpfs over the denied dir, bind back the allowed subdirectories
            // so they are readable again.
            for (const allowPath of readAllowPaths) {
                if (allowPath.startsWith(normalizedPath + '/') ||
                    allowPath === normalizedPath) {
                    if (!fs.existsSync(allowPath)) {
                        logForDebugging(`[Sandbox Linux] Skipping non-existent read allow path: ${allowPath}`);
                        continue;
                    }
                    // Bind the allowed path back over the tmpfs so it's readable
                    args.push('--ro-bind', allowPath, allowPath);
                    logForDebugging(`[Sandbox Linux] Re-allowed read access within denied region: ${allowPath}`);
                }
            }
        }
        else {
            // For files, check if this specific file is re-allowed
            const isReAllowed = readAllowPaths.some(allowPath => normalizedPath === allowPath ||
                normalizedPath.startsWith(allowPath + '/'));
            if (isReAllowed) {
                logForDebugging(`[Sandbox Linux] Skipping read deny for re-allowed path: ${normalizedPath}`);
                continue;
            }
            // For files, bind /dev/null instead of tmpfs
            args.push('--ro-bind', '/dev/null', normalizedPath);
        }
    }
    return args;
}
|
||||
/**
|
||||
* Wrap a command with sandbox restrictions on Linux
|
||||
*
|
||||
* UNIX SOCKET BLOCKING (APPLY-SECCOMP):
|
||||
* This implementation uses a custom apply-seccomp binary to block Unix domain socket
|
||||
* creation for user commands while allowing network infrastructure:
|
||||
*
|
||||
* Stage 1: Outer bwrap - Network and filesystem isolation (NO seccomp)
|
||||
* - Bubblewrap starts with isolated network namespace (--unshare-net)
|
||||
* - Bubblewrap applies PID namespace isolation (--unshare-pid and --proc)
|
||||
* - Filesystem restrictions are applied (read-only mounts, bind mounts, etc.)
|
||||
* - Socat processes start and connect to Unix socket bridges (can use socket(AF_UNIX, ...))
|
||||
*
|
||||
* Stage 2: apply-seccomp - Seccomp filter application (ONLY seccomp)
|
||||
* - apply-seccomp binary applies seccomp filter via prctl(PR_SET_SECCOMP)
|
||||
* - Sets PR_SET_NO_NEW_PRIVS to allow seccomp without root
|
||||
* - Execs user command with seccomp active (cannot create new Unix sockets)
|
||||
*
|
||||
* This solves the conflict between:
|
||||
* - Security: Blocking arbitrary Unix socket creation in user commands
|
||||
* - Functionality: Network sandboxing requires socat to call socket(AF_UNIX, ...) for bridge connections
|
||||
*
|
||||
* The seccomp-bpf filter blocks socket(AF_UNIX, ...) syscalls, preventing:
|
||||
* - Creating new Unix domain socket file descriptors
|
||||
*
|
||||
* Security limitations:
|
||||
* - Does NOT block operations (bind, connect, sendto, etc.) on inherited Unix socket FDs
|
||||
* - Does NOT prevent passing Unix socket FDs via SCM_RIGHTS
|
||||
* - For most sandboxing use cases, blocking socket creation is sufficient
|
||||
*
|
||||
* The filter allows:
|
||||
* - All TCP/UDP sockets (AF_INET, AF_INET6) for normal network operations
|
||||
* - All other syscalls
|
||||
*
|
||||
* PLATFORM NOTE:
|
||||
* The allowUnixSockets configuration is not path-based on Linux (unlike macOS)
|
||||
* because seccomp-bpf cannot inspect user-space memory to read socket paths.
|
||||
*
|
||||
* Requirements for seccomp filtering:
|
||||
* - Pre-built apply-seccomp binaries are included for x64 and ARM64
|
||||
* - Pre-generated BPF filters are included for x64 and ARM64
|
||||
* - Other architectures are not currently supported (no apply-seccomp binary available)
|
||||
* - To use sandboxing without Unix socket blocking on unsupported architectures,
|
||||
* set allowAllUnixSockets: true in your configuration
|
||||
* Dependencies are checked by checkLinuxDependencies() before enabling the sandbox.
|
||||
*/
|
||||
/**
 * Wrap `command` in a `bwrap` (bubblewrap) invocation that enforces the
 * configured network, filesystem, and seccomp restrictions on Linux.
 *
 * Returns the original command string unchanged when no restrictions apply;
 * otherwise returns a fully shell-quoted `bwrap ...` command line.
 *
 * NOTE(review): the precise shapes of `readConfig`/`writeConfig`/`seccompConfig`
 * are inferred from usage below — confirm against the callers' type definitions.
 */
export async function wrapCommandWithSandboxLinux(params) {
    const { command, needsNetworkRestriction, httpSocketPath, socksSocketPath, httpProxyPort, socksProxyPort, readConfig, writeConfig, enableWeakerNestedSandbox, allowAllUnixSockets, binShell, ripgrepConfig = { command: 'rg' }, mandatoryDenySearchDepth = DEFAULT_MANDATORY_DENY_SEARCH_DEPTH, allowGitConfig = false, seccompConfig, abortSignal, } = params;
    // Determine if we have restrictions to apply
    // Read: denyOnly pattern - empty array means no restrictions
    // Write: allowOnly pattern - undefined means no restrictions, any config means restrictions
    const hasReadRestrictions = readConfig && readConfig.denyOnly.length > 0;
    const hasWriteRestrictions = writeConfig !== undefined;
    // Check if we need any sandboxing
    if (!needsNetworkRestriction &&
        !hasReadRestrictions &&
        !hasWriteRestrictions) {
        return command;
    }
    // --new-session detaches the controlling terminal (blocks TIOCSTI-style
    // escapes); --die-with-parent ties sandbox lifetime to this process.
    const bwrapArgs = ['--new-session', '--die-with-parent'];
    let seccompFilterPath = undefined;
    try {
        // ========== SECCOMP FILTER (Unix Socket Blocking) ==========
        // Use bwrap's --seccomp flag to apply BPF filter that blocks Unix socket creation
        //
        // NOTE: Seccomp filtering is only enabled when allowAllUnixSockets is false
        // (when true, Unix sockets are allowed)
        if (!allowAllUnixSockets) {
            seccompFilterPath =
                generateSeccompFilter(seccompConfig?.bpfPath) ?? undefined;
            const applySeccompBinary = getApplySeccompBinaryPath(seccompConfig?.applyPath);
            if (!seccompFilterPath || !applySeccompBinary) {
                // Seccomp binaries not found - warn but continue without unix socket blocking
                logForDebugging('[Sandbox Linux] Seccomp binaries not available - unix socket blocking disabled. ' +
                    'Install @anthropic-ai/sandbox-runtime globally for full protection.', { level: 'warn' });
                // Clear the filter path so we don't try to use it
                seccompFilterPath = undefined;
            }
            else {
                // Track filter for cleanup and register exit handler
                // Only track runtime-generated filters (not pre-generated ones from vendor/)
                if (!seccompFilterPath.includes('/vendor/seccomp/')) {
                    generatedSeccompFilters.add(seccompFilterPath);
                    registerExitCleanupHandler();
                }
                logForDebugging('[Sandbox Linux] Generated seccomp BPF filter for Unix socket blocking');
            }
        }
        else {
            logForDebugging('[Sandbox Linux] Skipping seccomp filter - allowAllUnixSockets is enabled');
        }
        // ========== NETWORK RESTRICTIONS ==========
        if (needsNetworkRestriction) {
            // Always unshare network namespace to isolate network access
            // This removes all network interfaces, effectively blocking all network
            bwrapArgs.push('--unshare-net');
            // If proxy sockets are provided, bind them into the sandbox to allow
            // filtered network access through the proxy. If not provided, network
            // is completely blocked (empty allowedDomains = block all)
            if (httpSocketPath && socksSocketPath) {
                // Verify socket files still exist before trying to bind them
                if (!fs.existsSync(httpSocketPath)) {
                    throw new Error(`Linux HTTP bridge socket does not exist: ${httpSocketPath}. ` +
                        'The bridge process may have died. Try reinitializing the sandbox.');
                }
                if (!fs.existsSync(socksSocketPath)) {
                    throw new Error(`Linux SOCKS bridge socket does not exist: ${socksSocketPath}. ` +
                        'The bridge process may have died. Try reinitializing the sandbox.');
                }
                // Bind both sockets into the sandbox
                bwrapArgs.push('--bind', httpSocketPath, httpSocketPath);
                bwrapArgs.push('--bind', socksSocketPath, socksSocketPath);
                // Add proxy environment variables
                // HTTP_PROXY points to the socat listener inside the sandbox (port 3128)
                // which forwards to the Unix socket that bridges to the host's proxy server
                const proxyEnv = generateProxyEnvVars(3128, // Internal HTTP listener port
                1080);
                // generateProxyEnvVars returns "KEY=value" strings; split on the
                // FIRST '=' only, since the value may itself contain '='.
                bwrapArgs.push(...proxyEnv.flatMap((env) => {
                    const firstEq = env.indexOf('=');
                    const key = env.slice(0, firstEq);
                    const value = env.slice(firstEq + 1);
                    return ['--setenv', key, value];
                }));
                // Add host proxy port environment variables for debugging/transparency
                // These show which host ports the Unix socket bridges connect to
                if (httpProxyPort !== undefined) {
                    bwrapArgs.push('--setenv', 'CLAUDE_CODE_HOST_HTTP_PROXY_PORT', String(httpProxyPort));
                }
                if (socksProxyPort !== undefined) {
                    bwrapArgs.push('--setenv', 'CLAUDE_CODE_HOST_SOCKS_PROXY_PORT', String(socksProxyPort));
                }
            }
            // If no sockets provided, network is completely blocked (--unshare-net without proxy)
        }
        // ========== FILESYSTEM RESTRICTIONS ==========
        const fsArgs = await generateFilesystemArgs(readConfig, writeConfig, ripgrepConfig, mandatoryDenySearchDepth, allowGitConfig, abortSignal);
        bwrapArgs.push(...fsArgs);
        // Always bind /dev
        bwrapArgs.push('--dev', '/dev');
        // ========== PID NAMESPACE ISOLATION ==========
        // IMPORTANT: These must come AFTER filesystem binds for nested bwrap to work
        // By default, always unshare PID namespace and mount fresh /proc.
        // If we don't have --unshare-pid, it is possible to escape the sandbox.
        // If we don't have --proc, it is possible to read host /proc and leak information about code running
        // outside the sandbox. But, --proc is not available when running in unprivileged docker containers
        // so we support running without it if explicitly requested.
        bwrapArgs.push('--unshare-pid');
        if (!enableWeakerNestedSandbox) {
            // Mount fresh /proc if PID namespace is isolated (secure mode)
            bwrapArgs.push('--proc', '/proc');
        }
        // ========== COMMAND ==========
        // Use the user's shell (zsh, bash, etc.) to ensure aliases/snapshots work
        // Resolve the full path to the shell binary since bwrap doesn't use $PATH
        const shellName = binShell || 'bash';
        const shell = whichSync(shellName);
        if (!shell) {
            throw new Error(`Shell '${shellName}' not found in PATH`);
        }
        // '--' ends bwrap option parsing; everything after is the sandboxed argv.
        bwrapArgs.push('--', shell, '-c');
        // If we have network restrictions, use the network bridge setup with apply-seccomp for seccomp
        // Otherwise, just run the command directly with apply-seccomp if needed
        if (needsNetworkRestriction && httpSocketPath && socksSocketPath) {
            // Pass seccomp filter to buildSandboxCommand for apply-seccomp application
            // This allows socat to start before seccomp is applied
            const sandboxCommand = buildSandboxCommand(httpSocketPath, socksSocketPath, command, seccompFilterPath, shell, seccompConfig?.applyPath);
            bwrapArgs.push(sandboxCommand);
        }
        else if (seccompFilterPath) {
            // No network restrictions but we have seccomp - use apply-seccomp directly
            // apply-seccomp is a simple C program that applies the seccomp filter and execs the command
            const applySeccompBinary = getApplySeccompBinaryPath(seccompConfig?.applyPath);
            if (!applySeccompBinary) {
                throw new Error('apply-seccomp binary not found. This should have been caught earlier. ' +
                    'Ensure vendor/seccomp/{x64,arm64}/apply-seccomp binaries are included in the package.');
            }
            const applySeccompCmd = shellquote.quote([
                applySeccompBinary,
                seccompFilterPath,
                shell,
                '-c',
                command,
            ]);
            bwrapArgs.push(applySeccompCmd);
        }
        else {
            bwrapArgs.push(command);
        }
        // Build the outer bwrap command
        const wrappedCommand = shellquote.quote(['bwrap', ...bwrapArgs]);
        const restrictions = [];
        if (needsNetworkRestriction)
            restrictions.push('network');
        if (hasReadRestrictions || hasWriteRestrictions)
            restrictions.push('filesystem');
        if (seccompFilterPath)
            restrictions.push('seccomp(unix-block)');
        logForDebugging(`[Sandbox Linux] Wrapped command with bwrap (${restrictions.join(', ')} restrictions)`);
        return wrappedCommand;
    }
    catch (error) {
        // Clean up seccomp filter on error
        if (seccompFilterPath && !seccompFilterPath.includes('/vendor/seccomp/')) {
            generatedSeccompFilters.delete(seccompFilterPath);
            try {
                cleanupSeccompFilter(seccompFilterPath);
            }
            catch (cleanupError) {
                logForDebugging(`[Sandbox Linux] Failed to clean up seccomp filter on error: ${cleanupError}`, { level: 'error' });
            }
        }
        // Re-throw the original error
        throw error;
    }
}
|
||||
//# sourceMappingURL=linux-sandbox-utils.js.map
|
||||
@@ -1,630 +0,0 @@
|
||||
import shellquote from 'shell-quote';
|
||||
import { spawn } from 'child_process';
|
||||
import * as path from 'path';
|
||||
import { logForDebugging } from '../utils/debug.js';
|
||||
import { whichSync } from '../utils/which.js';
|
||||
import { normalizePathForSandbox, generateProxyEnvVars, encodeSandboxedCommand, decodeSandboxedCommand, containsGlobChars, globToRegex, DANGEROUS_FILES, getDangerousDirectories, } from './sandbox-utils.js';
|
||||
/**
 * Build the always-denied write patterns for the macOS sandbox (no filesystem
 * scanning needed — the Seatbelt profile matches globs directly via
 * globToRegex()). Each protected name is emitted twice: as an absolute path
 * under the current working directory and as a recursive `**` glob.
 *
 * @param {boolean} [allowGitConfig=false] - when true, `.git/config` is NOT denied.
 * @returns {string[]} de-duplicated list of deny paths/patterns
 */
export function macGetMandatoryDenyPatterns(allowGitConfig = false) {
    const workingDir = process.cwd();
    const patterns = [];
    // Emit the CWD-anchored path and the subtree glob together.
    const addPair = (staticPath, globPattern) => {
        patterns.push(staticPath, globPattern);
    };
    for (const name of DANGEROUS_FILES) {
        addPair(path.resolve(workingDir, name), `**/${name}`);
    }
    for (const dir of getDangerousDirectories()) {
        addPair(path.resolve(workingDir, dir), `**/${dir}/**`);
    }
    // Git hooks are unconditionally blocked for security.
    addPair(path.resolve(workingDir, '.git/hooks'), '**/.git/hooks/**');
    // Git config is blocked unless the caller explicitly opted in.
    if (!allowGitConfig) {
        addPair(path.resolve(workingDir, '.git/config'), '**/.git/config');
    }
    return [...new Set(patterns)];
}
|
||||
const sessionSuffix = `_${Math.random().toString(36).slice(2, 11)}_SBX`;
|
||||
/**
 * Build the unique tag embedded in sandbox-profile deny messages so that log
 * monitoring can attribute a violation back to the offending command.
 * Format: `CMD64_<base64(command)>_END_<sessionSuffix>`.
 *
 * @param {string} command - the command being executed (base64-encoded into the tag)
 * @returns {string} the log tag
 */
function generateLogTag(command) {
    return `CMD64_${encodeSandboxedCommand(command)}_END_${sessionSuffix}`;
}
|
||||
/**
 * List every ancestor directory of a path, nearest-first, stopping before
 * the root ('/' or '.').
 * Example: /private/tmp/test/file.txt -> ["/private/tmp/test", "/private/tmp", "/private"]
 *
 * @param {string} pathStr - path whose ancestors are wanted
 * @returns {string[]} ancestor directories, closest first
 */
function getAncestorDirectories(pathStr) {
    const result = [];
    for (let dir = path.dirname(pathStr); dir !== '/' && dir !== '.'; ) {
        result.push(dir);
        const next = path.dirname(dir);
        // path.dirname is a fixed point at the top — stop to avoid looping forever.
        if (next === dir) {
            break;
        }
        dir = next;
    }
    return result;
}
|
||||
/**
 * Generate deny rules for file movement (file-write-unlink) to protect paths
 * This prevents bypassing read or write restrictions by moving files/directories
 *
 * Rule-emission order matters and is preserved deliberately: for each pattern
 * we emit the rule for the pattern itself, then (for globs) its base directory,
 * then every ancestor directory — so a protected subtree cannot be renamed
 * away at any level.
 *
 * @param pathPatterns - Array of path patterns to protect (can include globs)
 * @param logTag - Log tag for sandbox violations
 * @returns Array of sandbox profile rule lines
 */
function generateMoveBlockingRules(pathPatterns, logTag) {
    const rules = [];
    for (const pathPattern of pathPatterns) {
        const normalizedPath = normalizePathForSandbox(pathPattern);
        if (containsGlobChars(normalizedPath)) {
            // Use regex matching for glob patterns
            const regexPattern = globToRegex(normalizedPath);
            // Block moving/renaming files matching this pattern
            rules.push(`(deny file-write-unlink`, ` (regex ${escapePath(regexPattern)})`, ` (with message "${logTag}"))`);
            // For glob patterns, extract the static prefix and block ancestor moves
            // Remove glob characters to get the directory prefix
            const staticPrefix = normalizedPath.split(/[*?[\]]/)[0];
            if (staticPrefix && staticPrefix !== '/') {
                // Get the directory containing the glob pattern
                const baseDir = staticPrefix.endsWith('/')
                    ? staticPrefix.slice(0, -1)
                    : path.dirname(staticPrefix);
                // Block moves of the base directory itself
                rules.push(`(deny file-write-unlink`, ` (literal ${escapePath(baseDir)})`, ` (with message "${logTag}"))`);
                // Block moves of ancestor directories
                for (const ancestorDir of getAncestorDirectories(baseDir)) {
                    rules.push(`(deny file-write-unlink`, ` (literal ${escapePath(ancestorDir)})`, ` (with message "${logTag}"))`);
                }
            }
        }
        else {
            // Use subpath matching for literal paths
            // Block moving/renaming the denied path itself
            rules.push(`(deny file-write-unlink`, ` (subpath ${escapePath(normalizedPath)})`, ` (with message "${logTag}"))`);
            // Block moves of ancestor directories
            for (const ancestorDir of getAncestorDirectories(normalizedPath)) {
                rules.push(`(deny file-write-unlink`, ` (literal ${escapePath(ancestorDir)})`, ` (with message "${logTag}"))`);
            }
        }
    }
    return rules;
}
|
||||
/**
 * Generate filesystem read rules for sandbox profile
 *
 * Supports two layers:
 * 1. denyOnly: deny reads from these paths (broad regions like /Users)
 * 2. allowWithinDeny: re-allow reads within denied regions (like CWD)
 * allowWithinDeny takes precedence over denyOnly.
 *
 * In Seatbelt profiles, later rules take precedence, so we emit:
 * (allow file-read*) ← default: allow everything
 * (deny file-read* ...) ← deny broad regions
 * (allow file-read* ...) ← re-allow specific paths within denied regions
 *
 * @param config - read restriction config ({ denyOnly?, allowWithinDeny? }) or undefined
 * @param logTag - tag embedded in deny messages for violation attribution
 * @returns profile rule lines
 */
function generateReadRules(config, logTag) {
    if (!config) {
        return [`(allow file-read*)`];
    }
    const rules = [];
    // Start by allowing everything
    rules.push(`(allow file-read*)`);
    // Then deny specific paths
    for (const pathPattern of config.denyOnly || []) {
        const normalizedPath = normalizePathForSandbox(pathPattern);
        if (containsGlobChars(normalizedPath)) {
            // Use regex matching for glob patterns
            const regexPattern = globToRegex(normalizedPath);
            rules.push(`(deny file-read*`, ` (regex ${escapePath(regexPattern)})`, ` (with message "${logTag}"))`);
        }
        else {
            // Use subpath matching for literal paths
            rules.push(`(deny file-read*`, ` (subpath ${escapePath(normalizedPath)})`, ` (with message "${logTag}"))`);
        }
    }
    // Re-allow specific paths within denied regions (allowWithinDeny takes precedence)
    for (const pathPattern of config.allowWithinDeny || []) {
        const normalizedPath = normalizePathForSandbox(pathPattern);
        if (containsGlobChars(normalizedPath)) {
            const regexPattern = globToRegex(normalizedPath);
            rules.push(`(allow file-read*`, ` (regex ${escapePath(regexPattern)})`, ` (with message "${logTag}"))`);
        }
        else {
            rules.push(`(allow file-read*`, ` (subpath ${escapePath(normalizedPath)})`, ` (with message "${logTag}"))`);
        }
    }
    // Allow stat/lstat on all directories so that realpath() can traverse
    // path components within denied regions. Without this, C realpath() fails
    // when resolving symlinks because it needs to lstat every intermediate
    // directory (e.g. /Users, /Users/chris) even if only a subdirectory like
    // ~/.local is in allowWithinDeny. This only allows metadata reads on
    // directories — not listing contents (readdir) or reading files.
    // FIX: guard denyOnly with `|| []` — every other access in this function
    // treats denyOnly as optional; the previous bare `config.denyOnly.length`
    // threw a TypeError for a config with only allowWithinDeny set.
    if ((config.denyOnly || []).length > 0) {
        rules.push(`(allow file-read-metadata`, ` (vnode-type DIRECTORY))`);
    }
    // Block file movement to prevent bypass via mv/rename
    rules.push(...generateMoveBlockingRules(config.denyOnly || [], logTag));
    return rules;
}
|
||||
/**
 * Generate filesystem write rules for the Seatbelt sandbox profile.
 *
 * Emits, in order: allow rules for the macOS TMPDIR parents, allow rules for
 * config.allowOnly, deny rules for config.denyWithinAllow plus the mandatory
 * deny patterns, and finally file-move-blocking rules for the denied paths.
 * With no config at all, everything is writable.
 *
 * @param config - write restriction config ({ allowOnly?, denyWithinAllow? }) or undefined
 * @param logTag - tag embedded in rule messages for violation attribution
 * @param {boolean} [allowGitConfig=false] - forwarded to macGetMandatoryDenyPatterns
 * @returns profile rule lines
 */
function generateWriteRules(config, logTag, allowGitConfig = false) {
    if (!config) {
        return [`(allow file-write*)`];
    }
    const rules = [];
    // Emit one three-line rule: "(<verb> file-write*" / " (<matcher>)" / message.
    const pushRule = (verb, matcher) => {
        rules.push(`(${verb} file-write*`, ` (${matcher})`, ` (with message "${logTag}"))`);
    };
    // Glob patterns become regex matchers; literal paths use subpath matching.
    const matcherFor = (normalized) => containsGlobChars(normalized)
        ? `regex ${escapePath(globToRegex(normalized))}`
        : `subpath ${escapePath(normalized)}`;
    // TMPDIR parents are automatically writable on macOS when restrictions are on.
    for (const tmpdirParent of getTmpdirParentIfMacOSPattern()) {
        pushRule('allow', `subpath ${escapePath(normalizePathForSandbox(tmpdirParent))}`);
    }
    // User-requested writable paths.
    for (const pattern of config.allowOnly || []) {
        pushRule('allow', matcherFor(normalizePathForSandbox(pattern)));
    }
    // User-specified denials plus the mandatory ones (no ripgrep needed on macOS).
    const denyPaths = [
        ...(config.denyWithinAllow || []),
        ...macGetMandatoryDenyPatterns(allowGitConfig),
    ];
    for (const pattern of denyPaths) {
        pushRule('deny', matcherFor(normalizePathForSandbox(pattern)));
    }
    // Block mv/rename tricks that would relocate a denied path.
    rules.push(...generateMoveBlockingRules(denyPaths, logTag));
    return rules;
}
|
||||
/**
 * Generate complete sandbox profile
 *
 * Builds the full Seatbelt (sandbox-exec) profile text: a deny-by-default
 * baseline, a fixed allowlist of essential OS services, then network,
 * file-read, file-write, and optional pty rules driven by the arguments.
 * Every deny carries `logTag` so violations can be attributed in the log.
 */
function generateSandboxProfile({ readConfig, writeConfig, httpProxyPort, socksProxyPort, needsNetworkRestriction, allowUnixSockets, allowAllUnixSockets, allowLocalBinding, allowPty, allowGitConfig = false, enableWeakerNetworkIsolation = false, logTag, }) {
    const profile = [
        '(version 1)',
        `(deny default (with message "${logTag}"))`,
        '',
        `; LogTag: ${logTag}`,
        '',
        '; Essential permissions - based on Chrome sandbox policy',
        '; Process permissions',
        '(allow process-exec)',
        '(allow process-fork)',
        '(allow process-info* (target same-sandbox))',
        '(allow signal (target same-sandbox))',
        '(allow mach-priv-task-port (target same-sandbox))',
        '',
        '; User preferences',
        '(allow user-preference-read)',
        '',
        '; Mach IPC - specific services only (no wildcard)',
        '(allow mach-lookup',
        ' (global-name "com.apple.audio.systemsoundserver")',
        ' (global-name "com.apple.distributed_notifications@Uv3")',
        ' (global-name "com.apple.FontObjectsServer")',
        ' (global-name "com.apple.fonts")',
        ' (global-name "com.apple.logd")',
        ' (global-name "com.apple.lsd.mapdb")',
        ' (global-name "com.apple.PowerManagement.control")',
        ' (global-name "com.apple.system.logger")',
        ' (global-name "com.apple.system.notification_center")',
        ' (global-name "com.apple.system.opendirectoryd.libinfo")',
        ' (global-name "com.apple.system.opendirectoryd.membership")',
        ' (global-name "com.apple.bsd.dirhelper")',
        ' (global-name "com.apple.securityd.xpc")',
        ' (global-name "com.apple.coreservices.launchservicesd")',
        ')',
        '',
        ...(enableWeakerNetworkIsolation
            ? [
                '; trustd.agent - needed for Go TLS certificate verification (weaker network isolation)',
                '(allow mach-lookup (global-name "com.apple.trustd.agent"))',
            ]
            : []),
        '',
        '; POSIX IPC - shared memory',
        '(allow ipc-posix-shm)',
        '',
        '; POSIX IPC - semaphores for Python multiprocessing',
        '(allow ipc-posix-sem)',
        '',
        '; IOKit - specific operations only',
        '(allow iokit-open',
        ' (iokit-registry-entry-class "IOSurfaceRootUserClient")',
        ' (iokit-registry-entry-class "RootDomainUserClient")',
        ' (iokit-user-client-class "IOSurfaceSendRight")',
        ')',
        '',
        '; IOKit properties',
        '(allow iokit-get-properties)',
        '',
        "; Specific safe system-sockets, doesn't allow network access",
        '(allow system-socket (require-all (socket-domain AF_SYSTEM) (socket-protocol 2)))',
        '',
        '; sysctl - specific sysctls only',
        '(allow sysctl-read',
        ' (sysctl-name "hw.activecpu")',
        ' (sysctl-name "hw.busfrequency_compat")',
        ' (sysctl-name "hw.byteorder")',
        ' (sysctl-name "hw.cacheconfig")',
        ' (sysctl-name "hw.cachelinesize_compat")',
        ' (sysctl-name "hw.cpufamily")',
        ' (sysctl-name "hw.cpufrequency")',
        ' (sysctl-name "hw.cpufrequency_compat")',
        ' (sysctl-name "hw.cputype")',
        ' (sysctl-name "hw.l1dcachesize_compat")',
        ' (sysctl-name "hw.l1icachesize_compat")',
        ' (sysctl-name "hw.l2cachesize_compat")',
        ' (sysctl-name "hw.l3cachesize_compat")',
        ' (sysctl-name "hw.logicalcpu")',
        ' (sysctl-name "hw.logicalcpu_max")',
        ' (sysctl-name "hw.machine")',
        ' (sysctl-name "hw.memsize")',
        ' (sysctl-name "hw.ncpu")',
        ' (sysctl-name "hw.nperflevels")',
        ' (sysctl-name "hw.packages")',
        ' (sysctl-name "hw.pagesize_compat")',
        ' (sysctl-name "hw.pagesize")',
        ' (sysctl-name "hw.physicalcpu")',
        ' (sysctl-name "hw.physicalcpu_max")',
        ' (sysctl-name "hw.tbfrequency_compat")',
        ' (sysctl-name "hw.vectorunit")',
        ' (sysctl-name "kern.argmax")',
        ' (sysctl-name "kern.bootargs")',
        ' (sysctl-name "kern.hostname")',
        ' (sysctl-name "kern.maxfiles")',
        ' (sysctl-name "kern.maxfilesperproc")',
        ' (sysctl-name "kern.maxproc")',
        ' (sysctl-name "kern.ngroups")',
        ' (sysctl-name "kern.osproductversion")',
        ' (sysctl-name "kern.osrelease")',
        ' (sysctl-name "kern.ostype")',
        ' (sysctl-name "kern.osvariant_status")',
        ' (sysctl-name "kern.osversion")',
        ' (sysctl-name "kern.secure_kernel")',
        ' (sysctl-name "kern.tcsm_available")',
        ' (sysctl-name "kern.tcsm_enable")',
        ' (sysctl-name "kern.usrstack64")',
        ' (sysctl-name "kern.version")',
        ' (sysctl-name "kern.willshutdown")',
        ' (sysctl-name "machdep.cpu.brand_string")',
        ' (sysctl-name "machdep.ptrauth_enabled")',
        ' (sysctl-name "security.mac.lockdown_mode_state")',
        ' (sysctl-name "sysctl.proc_cputype")',
        ' (sysctl-name "vm.loadavg")',
        ' (sysctl-name-prefix "hw.optional.arm")',
        ' (sysctl-name-prefix "hw.optional.arm.")',
        ' (sysctl-name-prefix "hw.optional.armv8_")',
        ' (sysctl-name-prefix "hw.perflevel")',
        ' (sysctl-name-prefix "kern.proc.all")',
        ' (sysctl-name-prefix "kern.proc.pgrp.")',
        ' (sysctl-name-prefix "kern.proc.pid.")',
        ' (sysctl-name-prefix "machdep.cpu.")',
        ' (sysctl-name-prefix "net.routetable.")',
        ')',
        '',
        '; V8 thread calculations',
        '(allow sysctl-write',
        ' (sysctl-name "kern.tcsm_enable")',
        ')',
        '',
        '; Distributed notifications',
        '(allow distributed-notification-post)',
        '',
        '; Specific mach-lookup permissions for security operations',
        '(allow mach-lookup (global-name "com.apple.SecurityServer"))',
        '',
        '; File I/O on device files',
        '(allow file-ioctl (literal "/dev/null"))',
        '(allow file-ioctl (literal "/dev/zero"))',
        '(allow file-ioctl (literal "/dev/random"))',
        '(allow file-ioctl (literal "/dev/urandom"))',
        '(allow file-ioctl (literal "/dev/dtracehelper"))',
        '(allow file-ioctl (literal "/dev/tty"))',
        '',
        '(allow file-ioctl file-read-data file-write-data',
        ' (require-all',
        ' (literal "/dev/null")',
        ' (vnode-type CHARACTER-DEVICE)',
        ' )',
        ')',
        '',
    ];
    // Network rules
    profile.push('; Network');
    if (!needsNetworkRestriction) {
        profile.push('(allow network*)');
    }
    else {
        // Allow local binding if requested
        // Use "*:*" instead of "localhost:*" because modern runtimes (Java, etc.) create
        // IPv6 dual-stack sockets by default. When binding such a socket to 127.0.0.1,
        // the kernel represents it as ::ffff:127.0.0.1 (IPv4-mapped IPv6). Seatbelt's
        // "localhost" filter only matches 127.0.0.1 and ::1, NOT ::ffff:127.0.0.1.
        // Using (local ip "*:*") is safe because it only matches the LOCAL endpoint —
        // internet-bound connections originate from non-loopback interfaces, so they
        // remain blocked by (deny default).
        if (allowLocalBinding) {
            profile.push('(allow network-bind (local ip "*:*"))');
            profile.push('(allow network-inbound (local ip "*:*"))');
            profile.push('(allow network-outbound (local ip "*:*"))');
        }
        // Unix domain sockets for local IPC (SSH agent, Docker, Gradle, etc.)
        // Three separate operations must be allowed:
        // 1. system-socket: socket(AF_UNIX, ...) syscall — creates the socket fd (no path context)
        // 2. network-bind: bind() to a local Unix socket path
        // 3. network-outbound: connect() to a remote Unix socket path
        // Note: (subpath ...) and (path-regex ...) are path-based filters that can only match
        // bind/connect operations — socket() creation has no path, so it requires system-socket.
        if (allowAllUnixSockets) {
            // Allow creating AF_UNIX sockets and all Unix socket paths
            profile.push('(allow system-socket (socket-domain AF_UNIX))');
            profile.push('(allow network-bind (local unix-socket (path-regex #"^/")))');
            profile.push('(allow network-outbound (remote unix-socket (path-regex #"^/")))');
        }
        else if (allowUnixSockets && allowUnixSockets.length > 0) {
            // Allow creating AF_UNIX sockets (required for any Unix socket use)
            profile.push('(allow system-socket (socket-domain AF_UNIX))');
            // Allow specific Unix socket paths
            for (const socketPath of allowUnixSockets) {
                const normalizedPath = normalizePathForSandbox(socketPath);
                profile.push(`(allow network-bind (local unix-socket (subpath ${escapePath(normalizedPath)})))`);
                profile.push(`(allow network-outbound (remote unix-socket (subpath ${escapePath(normalizedPath)})))`);
            }
        }
        // If both allowAllUnixSockets and allowUnixSockets are false/undefined/empty, Unix sockets are blocked by default
        // Allow localhost TCP operations for the HTTP proxy
        if (httpProxyPort !== undefined) {
            profile.push(`(allow network-bind (local ip "localhost:${httpProxyPort}"))`);
            profile.push(`(allow network-inbound (local ip "localhost:${httpProxyPort}"))`);
            profile.push(`(allow network-outbound (remote ip "localhost:${httpProxyPort}"))`);
        }
        // Allow localhost TCP operations for the SOCKS proxy
        if (socksProxyPort !== undefined) {
            profile.push(`(allow network-bind (local ip "localhost:${socksProxyPort}"))`);
            profile.push(`(allow network-inbound (local ip "localhost:${socksProxyPort}"))`);
            profile.push(`(allow network-outbound (remote ip "localhost:${socksProxyPort}"))`);
        }
    }
    profile.push('');
    // Read rules
    profile.push('; File read');
    profile.push(...generateReadRules(readConfig, logTag));
    profile.push('');
    // Write rules
    profile.push('; File write');
    profile.push(...generateWriteRules(writeConfig, logTag, allowGitConfig));
    // Pseudo-terminal (pty) support
    if (allowPty) {
        profile.push('');
        profile.push('; Pseudo-terminal (pty) support');
        profile.push('(allow pseudo-tty)');
        profile.push('(allow file-ioctl');
        profile.push(' (literal "/dev/ptmx")');
        profile.push(' (regex #"^/dev/ttys")');
        profile.push(')');
        profile.push('(allow file-read* file-write*');
        profile.push(' (literal "/dev/ptmx")');
        profile.push(' (regex #"^/dev/ttys")');
        profile.push(')');
    }
    return profile.join('\n');
}
|
||||
/**
 * Quote a path (or regex source) for embedding in a Seatbelt profile.
 * JSON.stringify produces a double-quoted string with embedded quotes and
 * backslashes escaped, which matches the profile's string syntax.
 *
 * @param {string} rawPath - path or pattern to quote
 * @returns {string} the quoted, escaped string
 */
function escapePath(rawPath) {
    return JSON.stringify(rawPath);
}
|
||||
/**
 * If $TMPDIR matches the macOS per-user pattern /var/folders/XX/YYY/T/, return
 * its parent directory in both /var/... and /private/var/... spellings
 * (/var is a symlink to /private/var). Returns [] when TMPDIR is unset or
 * does not match the pattern.
 *
 * @returns {string[]} zero, one, or two parent-directory paths
 */
function getTmpdirParentIfMacOSPattern() {
    const tmpdir = process.env.TMPDIR;
    if (!tmpdir) {
        return [];
    }
    // e.g. /var/folders/ab/c1d2e3.../T/ (optionally /private-prefixed)
    const macOsTmpPattern = /^\/(private\/)?var\/folders\/[^/]{2}\/[^/]+\/T\/?$/;
    if (!macOsTmpPattern.test(tmpdir)) {
        return [];
    }
    const parent = tmpdir.replace(/\/T\/?$/, '');
    // Emit both symlink spellings so either form matches in the profile.
    if (parent.startsWith('/private/var/')) {
        return [parent, parent.replace('/private', '')];
    }
    if (parent.startsWith('/var/')) {
        return [parent, `/private${parent}`];
    }
    return [parent];
}
|
||||
/**
 * Wrap command with macOS sandbox
 *
 * Builds a Seatbelt profile from the supplied restriction configs and returns
 * an `env ... sandbox-exec -p <profile> <shell> -c <command>` command line,
 * fully shell-quoted. Returns the original command unchanged when no
 * restrictions apply.
 */
export function wrapCommandWithSandboxMacOS(params) {
    const { command, needsNetworkRestriction, httpProxyPort, socksProxyPort, allowUnixSockets, allowAllUnixSockets, allowLocalBinding, readConfig, writeConfig, allowPty, allowGitConfig = false, enableWeakerNetworkIsolation = false, binShell, } = params;
    // Determine if we have restrictions to apply
    // Read: denyOnly pattern - empty array means no restrictions
    // Write: allowOnly pattern - undefined means no restrictions, any config means restrictions
    const hasReadRestrictions = readConfig && readConfig.denyOnly.length > 0;
    const hasWriteRestrictions = writeConfig !== undefined;
    // No sandboxing needed
    if (!needsNetworkRestriction &&
        !hasReadRestrictions &&
        !hasWriteRestrictions) {
        return command;
    }
    const logTag = generateLogTag(command);
    const profile = generateSandboxProfile({
        readConfig,
        writeConfig,
        httpProxyPort,
        socksProxyPort,
        needsNetworkRestriction,
        allowUnixSockets,
        allowAllUnixSockets,
        allowLocalBinding,
        allowPty,
        allowGitConfig,
        enableWeakerNetworkIsolation,
        logTag,
    });
    // Generate proxy environment variables using shared utility
    const proxyEnvArgs = generateProxyEnvVars(httpProxyPort, socksProxyPort);
    // Use the user's shell (zsh, bash, etc.) to ensure aliases/snapshots work
    // Resolve the full path to the shell binary
    const shellName = binShell || 'bash';
    const shell = whichSync(shellName);
    if (!shell) {
        throw new Error(`Shell '${shellName}' not found in PATH`);
    }
    // Use `env` command to set environment variables - each VAR=value is a separate
    // argument that shellquote handles properly, avoiding shell quoting issues
    const wrappedCommand = shellquote.quote([
        'env',
        ...proxyEnvArgs,
        'sandbox-exec',
        '-p',
        profile,
        shell,
        '-c',
        command,
    ]);
    // NOTE(review): this log checks for an 'allowAllExcept' key, but the configs
    // above are accessed via denyOnly/allowOnly — confirm the key name matches
    // the actual config shape, otherwise this always logs 'denyAllExcept'.
    logForDebugging(`[Sandbox macOS] Applied restrictions - network: ${!!(httpProxyPort || socksProxyPort)}, read: ${readConfig
        ? 'allowAllExcept' in readConfig
            ? 'allowAllExcept'
            : 'denyAllExcept'
        : 'none'}, write: ${writeConfig
        ? 'allowAllExcept' in writeConfig
            ? 'allowAllExcept'
            : 'denyAllExcept'
        : 'none'}`);
    return wrappedCommand;
}
|
||||
/**
 * Start monitoring macOS system logs for sandbox violations
 * Look for sandbox-related kernel deny events ending in {logTag}
 *
 * Spawns `log stream` filtered to event messages ending in `sessionSuffix`
 * (NOTE(review): `sessionSuffix` is defined elsewhere in this module —
 * presumably a per-session tag the profiles append to log lines; confirm).
 * Each stdout chunk is scanned for a "Sandbox: ... deny ..." line and an
 * optional "CMD64_..._END" line carrying the base64-style encoded offending
 * command. Violations surviving the built-in noise filters and the caller's
 * `ignoreViolations` rules are forwarded to `callback`.
 *
 * @param callback invoked with { line, command, encodedCommand, timestamp }
 * @param ignoreViolations map of command patterns ("*" = any command) to
 *   path substrings whose violations should be dropped
 * @returns a function that stops the monitor (SIGTERMs the `log` process)
 */
export function startMacOSSandboxLogMonitor(callback, ignoreViolations) {
    // Pre-compile regex patterns for better performance
    const cmdExtractRegex = /CMD64_(.+?)_END/;
    const sandboxExtractRegex = /Sandbox:\s+(.+)$/;
    // Pre-process ignore patterns for faster lookup
    const wildcardPaths = ignoreViolations?.['*'] || [];
    const commandPatterns = ignoreViolations
        ? Object.entries(ignoreViolations).filter(([pattern]) => pattern !== '*')
        : [];
    // Stream and filter kernel logs for all sandbox violations
    // We can't filter by specific logTag since it's dynamic per command
    const logProcess = spawn('log', [
        'stream',
        '--predicate',
        `(eventMessage ENDSWITH "${sessionSuffix}")`,
        '--style',
        'compact',
    ]);
    logProcess.stdout?.on('data', (data) => {
        // NOTE(review): assumes the violation line and its CMD64_ line arrive
        // in the same 'data' chunk; lines split across chunk boundaries are
        // not reassembled — confirm this is acceptable.
        const lines = data.toString().split('\n');
        // Get violation and command lines
        const violationLine = lines.find(line => line.includes('Sandbox:') && line.includes('deny'));
        const commandLine = lines.find(line => line.startsWith('CMD64_'));
        if (!violationLine)
            return;
        // Extract violation details
        const sandboxMatch = violationLine.match(sandboxExtractRegex);
        if (!sandboxMatch?.[1])
            return;
        const violationDetails = sandboxMatch[1];
        // Try to get command
        let command;
        let encodedCommand;
        if (commandLine) {
            const cmdMatch = commandLine.match(cmdExtractRegex);
            encodedCommand = cmdMatch?.[1];
            if (encodedCommand) {
                try {
                    command = decodeSandboxedCommand(encodedCommand);
                }
                catch {
                    // Failed to decode, continue without command
                }
            }
        }
        // Always filter out noisy violations from ubiquitous macOS daemons
        if (violationDetails.includes('mDNSResponder') ||
            violationDetails.includes('mach-lookup com.apple.diagnosticd') ||
            violationDetails.includes('mach-lookup com.apple.analyticsd')) {
            return;
        }
        // Check if we should ignore this violation
        // (only possible when the command could be decoded)
        if (ignoreViolations && command) {
            // Check wildcard patterns first
            if (wildcardPaths.length > 0) {
                const shouldIgnore = wildcardPaths.some(path => violationDetails.includes(path));
                if (shouldIgnore)
                    return;
            }
            // Check command-specific patterns
            for (const [pattern, paths] of commandPatterns) {
                if (command.includes(pattern)) {
                    const shouldIgnore = paths.some(path => violationDetails.includes(path));
                    if (shouldIgnore)
                        return;
                }
            }
        }
        // Not ignored - report the violation
        callback({
            line: violationDetails,
            command,
            encodedCommand,
            timestamp: new Date(), // We could parse the timestamp from the log but this feels more reliable
        });
    });
    logProcess.stderr?.on('data', (data) => {
        logForDebugging(`[Sandbox Monitor] Log stream stderr: ${data.toString()}`);
    });
    logProcess.on('error', (error) => {
        logForDebugging(`[Sandbox Monitor] Failed to start log stream: ${error.message}`);
    });
    logProcess.on('exit', (code) => {
        logForDebugging(`[Sandbox Monitor] Log stream exited with code: ${code}`);
    });
    // Teardown closure for the caller.
    return () => {
        logForDebugging('[Sandbox Monitor] Stopping log monitor');
        logProcess.kill('SIGTERM');
    };
}
|
||||
//# sourceMappingURL=macos-sandbox-utils.js.map
|
||||
180
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/sandbox/sandbox-config.js
generated
vendored
180
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/sandbox/sandbox-config.js
generated
vendored
@@ -1,180 +0,0 @@
|
||||
/**
|
||||
* Configuration for Sandbox Runtime
|
||||
* This is the main configuration interface that consumers pass to SandboxManager.initialize()
|
||||
*/
|
||||
import { z } from 'zod';
|
||||
/**
 * Schema for domain patterns (e.g., "example.com", "*.npmjs.org")
 * Validates that domain patterns are safe and don't include overly broad wildcards
 */
const domainPatternSchema = z.string().refine(val => {
    // Anything that looks like a URL (protocol, path, or port) is rejected.
    const hasUrlSyntax = val.includes('://') || val.includes('/') || val.includes(':');
    if (hasUrlSyntax) {
        return false;
    }
    // localhost is explicitly permitted despite having no dot.
    if (val === 'localhost') {
        return true;
    }
    if (val.startsWith('*.')) {
        // Wildcard form: the remainder must itself be a dotted domain with at
        // least two non-empty labels (*.example.com is valid, *.com is not).
        const domain = val.slice(2);
        if (!domain.includes('.') || domain.startsWith('.') || domain.endsWith('.')) {
            return false;
        }
        const labels = domain.split('.');
        if (labels.length < 2) {
            return false;
        }
        return labels.every(label => label.length > 0);
    }
    // A bare '*' or any wildcard outside the '*.' prefix is invalid.
    if (val.includes('*')) {
        return false;
    }
    // Plain domains need an interior dot and no leading/trailing dot.
    return val.includes('.') && !val.startsWith('.') && !val.endsWith('.');
}, {
    message: 'Invalid domain pattern. Must be a valid domain (e.g., "example.com") or wildcard (e.g., "*.example.com"). Overly broad patterns like "*.com" or "*" are not allowed for security reasons.',
});
|
||||
/**
 * Schema for filesystem paths
 */
// Only non-emptiness is validated; existence/shape checks happen elsewhere.
const filesystemPathSchema = z.string().min(1, 'Path cannot be empty');
/**
 * Schema for MITM proxy configuration
 * Allows routing specific domains through an upstream MITM proxy via Unix socket
 */
const MitmProxyConfigSchema = z.object({
    // Non-empty socket path; reachability is not validated here.
    socketPath: z.string().min(1).describe('Unix socket path to the MITM proxy'),
    // At least one domain required; each entry goes through
    // domainPatternSchema, so overly broad wildcards are rejected.
    domains: z
        .array(domainPatternSchema)
        .min(1)
        .describe('Domains to route through the MITM proxy (e.g., ["api.example.com", "*.internal.org"])'),
});
|
||||
/**
 * Network configuration schema for validation
 *
 * Domain lists are validated by domainPatternSchema (no overly broad
 * wildcards). Per-field semantics are documented in the .describe() strings.
 */
export const NetworkConfigSchema = z.object({
    allowedDomains: z
        .array(domainPatternSchema)
        .describe('List of allowed domains (e.g., ["github.com", "*.npmjs.org"])'),
    deniedDomains: z
        .array(domainPatternSchema)
        .describe('List of denied domains'),
    allowUnixSockets: z
        .array(z.string())
        .optional()
        .describe('macOS only: Unix socket paths to allow. Ignored on Linux (seccomp cannot filter by path).'),
    allowAllUnixSockets: z
        .boolean()
        .optional()
        .describe('If true, allow all Unix sockets (disables blocking on both platforms).'),
    allowLocalBinding: z
        .boolean()
        .optional()
        .describe('Whether to allow binding to local ports (default: false)'),
    // When set, the library uses this external proxy instead of starting its own.
    httpProxyPort: z
        .number()
        .int()
        .min(1)
        .max(65535)
        .optional()
        .describe('Port of an external HTTP proxy to use instead of starting a local one. When provided, the library will skip starting its own HTTP proxy and use this port. The external proxy must handle domain filtering.'),
    // Same contract as httpProxyPort, for SOCKS.
    socksProxyPort: z
        .number()
        .int()
        .min(1)
        .max(65535)
        .optional()
        .describe('Port of an external SOCKS proxy to use instead of starting a local one. When provided, the library will skip starting its own SOCKS proxy and use this port. The external proxy must handle domain filtering.'),
    mitmProxy: MitmProxyConfigSchema.optional().describe('Optional MITM proxy configuration. Routes matching domains through an upstream proxy via Unix socket while SRT still handles allow/deny filtering.'),
});
|
||||
/**
 * Filesystem configuration schema for validation
 *
 * Precedence (per the field descriptions): allowRead re-allows inside
 * denyRead regions; denyWrite overrides allowWrite.
 */
export const FilesystemConfigSchema = z.object({
    denyRead: z.array(filesystemPathSchema).describe('Paths denied for reading'),
    allowRead: z
        .array(filesystemPathSchema)
        .optional()
        .describe('Paths to re-allow reading within denied regions (takes precedence over denyRead). ' +
        'Use with denyRead to deny a broad region then allow back specific subdirectories.'),
    allowWrite: z
        .array(filesystemPathSchema)
        .describe('Paths allowed for writing'),
    denyWrite: z
        .array(filesystemPathSchema)
        .describe('Paths denied for writing (takes precedence over allowWrite)'),
    allowGitConfig: z
        .boolean()
        .optional()
        .describe('Allow writes to .git/config files (default: false). Enables git remote URL updates while keeping .git/hooks protected.'),
});
|
||||
/**
 * Configuration schema for ignoring specific sandbox violations
 * Maps command patterns to filesystem paths to ignore violations for.
 */
// Key "*" applies to every command; other keys are substring patterns
// matched against the offending command.
export const IgnoreViolationsConfigSchema = z
    .record(z.string(), z.array(z.string()))
    .describe('Map of command patterns to filesystem paths to ignore violations for. Use "*" to match all commands');
/**
 * Ripgrep configuration schema
 */
export const RipgrepConfigSchema = z.object({
    command: z.string().describe('The ripgrep command to execute'),
    args: z
        .array(z.string())
        .optional()
        .describe('Additional arguments to pass before ripgrep args'),
    argv0: z
        .string()
        .optional()
        .describe('Override argv[0] when spawning (for multicall binaries that dispatch on argv[0])'),
});
/**
 * Seccomp configuration schema (Linux only)
 * Allows specifying custom paths to seccomp binaries
 */
export const SeccompConfigSchema = z.object({
    bpfPath: z
        .string()
        .optional()
        .describe('Path to the unix-block.bpf filter file'),
    applyPath: z.string().optional().describe('Path to the apply-seccomp binary'),
});
|
||||
/**
 * Main configuration schema for Sandbox Runtime validation
 *
 * Top-level object combining the network and filesystem restriction schemas
 * with optional platform-specific tuning; see each field's .describe().
 */
export const SandboxRuntimeConfigSchema = z.object({
    network: NetworkConfigSchema.describe('Network restrictions configuration'),
    filesystem: FilesystemConfigSchema.describe('Filesystem restrictions configuration'),
    ignoreViolations: IgnoreViolationsConfigSchema.optional().describe('Optional configuration for ignoring specific violations'),
    enableWeakerNestedSandbox: z
        .boolean()
        .optional()
        .describe('Enable weaker nested sandbox mode (for Docker environments)'),
    // Security trade-off: see description — opens a potential exfiltration
    // vector through trustd; only for Go TLS verification with MITM proxies.
    enableWeakerNetworkIsolation: z
        .boolean()
        .optional()
        .describe('Enable weaker network isolation to allow access to com.apple.trustd.agent (macOS only). ' +
        'This is needed for Go programs (gh, gcloud, terraform, kubectl, etc.) to verify TLS certificates ' +
        'when using httpProxyPort with a MITM proxy and custom CA. Enabling this opens a potential data ' +
        'exfiltration vector through the trustd service. Only enable if you need Go TLS verification.'),
    ripgrep: RipgrepConfigSchema.optional().describe('Custom ripgrep configuration (default: { command: "rg" })'),
    mandatoryDenySearchDepth: z
        .number()
        .int()
        .min(1)
        .max(10)
        .optional()
        .describe('Maximum directory depth to search for dangerous files on Linux (default: 3). ' +
        'Higher values provide more protection but slower performance.'),
    allowPty: z
        .boolean()
        .optional()
        .describe('Allow pseudo-terminal (pty) operations (macOS only)'),
    seccomp: SeccompConfigSchema.optional().describe('Custom seccomp binary paths (Linux only).'),
});
|
||||
//# sourceMappingURL=sandbox-config.js.map
|
||||
786
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/sandbox/sandbox-manager.js
generated
vendored
786
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/sandbox/sandbox-manager.js
generated
vendored
@@ -1,786 +0,0 @@
|
||||
import { createHttpProxyServer } from './http-proxy.js';
|
||||
import { createSocksProxyServer } from './socks-proxy.js';
|
||||
import { logForDebugging } from '../utils/debug.js';
|
||||
import { whichSync } from '../utils/which.js';
|
||||
import { cloneDeep } from 'lodash-es';
|
||||
import { getPlatform, getWslVersion } from '../utils/platform.js';
|
||||
import * as fs from 'fs';
|
||||
import { wrapCommandWithSandboxLinux, initializeLinuxNetworkBridge, checkLinuxDependencies, cleanupBwrapMountPoints, } from './linux-sandbox-utils.js';
|
||||
import { wrapCommandWithSandboxMacOS, startMacOSSandboxLogMonitor, } from './macos-sandbox-utils.js';
|
||||
import { getDefaultWritePaths, containsGlobChars, removeTrailingGlobSuffix, expandGlobPattern, } from './sandbox-utils.js';
|
||||
import { SandboxViolationStore } from './sandbox-violation-store.js';
|
||||
import { EOL } from 'node:os';
|
||||
// ============================================================================
// Private Module State
// ============================================================================
// Runtime configuration supplied via initialize(); undefined until then.
let config;
// Locally started proxy servers (stay undefined when external ports are
// configured in config.network).
let httpProxyServer;
let socksProxyServer;
// Ports / Linux bridge info produced by initialize(); undefined until
// network initialization completes.
let managerContext;
// In-flight initialization promise; also serves as the "already
// initialized/initializing" latch checked by initialize().
let initializationPromise;
// Guards against installing the process-exit cleanup handlers twice.
let cleanupRegistered = false;
// Shutdown function returned by startMacOSSandboxLogMonitor, if running.
let logMonitorShutdown;
// Accumulates violations reported by the macOS log monitor.
const sandboxViolationStore = new SandboxViolationStore();
|
||||
// ============================================================================
|
||||
// Private Helper Functions (not exported)
|
||||
// ============================================================================
|
||||
/**
 * Install process teardown hooks (exit, SIGINT, SIGTERM) that run reset()
 * on a best-effort basis, logging any cleanup failure. Idempotent: the
 * handlers are registered at most once per process.
 */
function registerCleanup() {
    if (cleanupRegistered) {
        return;
    }
    const cleanupHandler = () => {
        reset().catch(e => {
            logForDebugging(`Cleanup failed in registerCleanup ${e}`, {
                level: 'error',
            });
        });
    };
    for (const event of ['exit', 'SIGINT', 'SIGTERM']) {
        process.once(event, cleanupHandler);
    }
    cleanupRegistered = true;
}
|
||||
/**
 * Case-insensitive match of a hostname against a domain pattern.
 * A pattern of the form "*.example.com" matches any subdomain of
 * example.com but NOT example.com itself; any other pattern must
 * equal the hostname exactly (ignoring case).
 */
function matchesDomainPattern(hostname, pattern) {
    const host = hostname.toLowerCase();
    if (!pattern.startsWith('*.')) {
        // Exact match for non-wildcard patterns
        return host === pattern.toLowerCase();
    }
    const baseDomain = pattern.slice(2).toLowerCase();
    return host.endsWith(`.${baseDomain}`);
}
|
||||
/**
 * Decide whether a proxied network request to host:port may proceed.
 *
 * Evaluation order: without an initialized config everything is denied;
 * denied domains win over allowed domains; when neither list matches, the
 * optional interactive callback is consulted. Without a callback — or if
 * the callback throws — the request is denied.
 */
async function filterNetworkRequest(port, host, sandboxAskCallback) {
    if (!config) {
        logForDebugging('No config available, denying network request');
        return false;
    }
    // Deny list takes precedence over the allow list.
    const isDenied = config.network.deniedDomains.some(pattern => matchesDomainPattern(host, pattern));
    if (isDenied) {
        logForDebugging(`Denied by config rule: ${host}:${port}`);
        return false;
    }
    const isAllowed = config.network.allowedDomains.some(pattern => matchesDomainPattern(host, pattern));
    if (isAllowed) {
        logForDebugging(`Allowed by config rule: ${host}:${port}`);
        return true;
    }
    // No matching rules: fall back to the interactive prompt if available.
    if (!sandboxAskCallback) {
        logForDebugging(`No matching config rule, denying: ${host}:${port}`);
        return false;
    }
    logForDebugging(`No matching config rule, asking user: ${host}:${port}`);
    try {
        const userAllowed = await sandboxAskCallback({ host, port });
        if (userAllowed) {
            logForDebugging(`User allowed: ${host}:${port}`);
            return true;
        }
        logForDebugging(`User denied: ${host}:${port}`);
        return false;
    }
    catch (error) {
        // A failing prompt is treated as a denial, not a crash.
        logForDebugging(`Error in permission callback: ${error}`, {
            level: 'error',
        });
        return false;
    }
}
|
||||
/**
 * Get the MITM proxy socket path for a given host, if configured.
 * Returns the socket path if the host matches any MITM domain pattern,
 * otherwise returns undefined.
 */
function getMitmSocketPath(host) {
    const mitm = config?.network.mitmProxy;
    if (!mitm) {
        return undefined;
    }
    const matched = mitm.domains.find(pattern => matchesDomainPattern(host, pattern));
    if (matched === undefined) {
        return undefined;
    }
    logForDebugging(`Host ${host} matches MITM pattern ${matched}`);
    return mitm.socketPath;
}
|
||||
/**
 * Create and start the local HTTP proxy on an ephemeral localhost port.
 *
 * Stores the server in module state (`httpProxyServer`) and resolves with
 * the OS-assigned port. The server is unref'd so it does not keep the
 * process alive. Rejects if 'error' fires before 'listening' or the bound
 * address cannot be read.
 *
 * @param sandboxAskCallback optional interactive fallback forwarded to
 *   filterNetworkRequest for hosts matching neither allow nor deny lists.
 * @returns the port number the proxy is listening on
 */
async function startHttpProxyServer(sandboxAskCallback) {
    httpProxyServer = createHttpProxyServer({
        filter: (port, host) => filterNetworkRequest(port, host, sandboxAskCallback),
        getMitmSocketPath,
    });
    return new Promise((resolve, reject) => {
        if (!httpProxyServer) {
            // Defensive guard (mostly for the typechecker).
            reject(new Error('HTTP proxy server undefined before listen'));
            return;
        }
        const server = httpProxyServer;
        server.once('error', reject);
        server.once('listening', () => {
            const address = server.address();
            if (address && typeof address === 'object') {
                // unref so a lingering proxy doesn't block process exit.
                server.unref();
                logForDebugging(`HTTP proxy listening on localhost:${address.port}`);
                resolve(address.port);
            }
            else {
                reject(new Error('Failed to get proxy server address'));
            }
        });
        // Port 0 => OS picks a free ephemeral port; loopback only.
        server.listen(0, '127.0.0.1');
    });
}
|
||||
/**
 * Create and start the local SOCKS proxy on an ephemeral localhost port.
 *
 * Stores the server in module state (`socksProxyServer`) and resolves with
 * the OS-assigned port; the server is unref'd after listening succeeds.
 * Unlike the HTTP proxy, this server exposes a promise-based listen().
 *
 * @param sandboxAskCallback optional interactive fallback forwarded to
 *   filterNetworkRequest for hosts matching neither allow nor deny lists.
 * @returns the port number the proxy is listening on
 */
async function startSocksProxyServer(sandboxAskCallback) {
    socksProxyServer = createSocksProxyServer({
        filter: (port, host) => filterNetworkRequest(port, host, sandboxAskCallback),
    });
    return new Promise((resolve, reject) => {
        if (!socksProxyServer) {
            // This is mostly just for the typechecker
            reject(new Error('SOCKS proxy server undefined before listen'));
            return;
        }
        socksProxyServer
            .listen(0, '127.0.0.1')
            .then((port) => {
            // unref so the proxy doesn't keep the process alive.
            socksProxyServer?.unref();
            resolve(port);
        })
            .catch(reject);
    });
}
|
||||
// ============================================================================
|
||||
// Public Module Functions (will be exported via namespace)
|
||||
// ============================================================================
|
||||
/**
 * Initialize the sandbox manager: store the runtime config, verify platform
 * dependencies, optionally start the macOS violation log monitor, register
 * process-exit cleanup, and bring up network infrastructure (HTTP/SOCKS
 * proxies — local or external per config — plus the Linux network bridge).
 *
 * Concurrency: the module-level `initializationPromise` makes repeated
 * calls await the first in-flight initialization instead of starting a
 * second one; note a later call's `runtimeConfig` is ignored in that case.
 * On failure the promise and context are cleared so initialization can be
 * retried, and best-effort cleanup runs.
 *
 * @param runtimeConfig validated sandbox runtime configuration
 * @param sandboxAskCallback optional interactive permission prompt used for
 *   network requests matching neither the allow nor the deny list
 * @param enableLogMonitor when true on macOS, stream sandbox violations
 *   into the module's violation store
 * @throws Error when required sandbox dependencies are missing
 */
async function initialize(runtimeConfig, sandboxAskCallback, enableLogMonitor = false) {
    // Return if already initializing
    if (initializationPromise) {
        await initializationPromise;
        return;
    }
    // Store config for use by other functions
    config = runtimeConfig;
    // Check dependencies
    const deps = checkDependencies();
    if (deps.errors.length > 0) {
        throw new Error(`Sandbox dependencies not available: ${deps.errors.join(', ')}`);
    }
    // Start log monitor for macOS if enabled
    if (enableLogMonitor && getPlatform() === 'macos') {
        logMonitorShutdown = startMacOSSandboxLogMonitor(sandboxViolationStore.addViolation.bind(sandboxViolationStore), config.ignoreViolations);
        logForDebugging('Started macOS sandbox log monitor');
    }
    // Register cleanup handlers first time
    registerCleanup();
    // Initialize network infrastructure
    initializationPromise = (async () => {
        try {
            // Conditionally start proxy servers based on config
            let httpProxyPort;
            if (config.network.httpProxyPort !== undefined) {
                // Use external HTTP proxy (don't start a server)
                httpProxyPort = config.network.httpProxyPort;
                logForDebugging(`Using external HTTP proxy on port ${httpProxyPort}`);
            }
            else {
                // Start local HTTP proxy
                httpProxyPort = await startHttpProxyServer(sandboxAskCallback);
            }
            let socksProxyPort;
            if (config.network.socksProxyPort !== undefined) {
                // Use external SOCKS proxy (don't start a server)
                socksProxyPort = config.network.socksProxyPort;
                logForDebugging(`Using external SOCKS proxy on port ${socksProxyPort}`);
            }
            else {
                // Start local SOCKS proxy
                socksProxyPort = await startSocksProxyServer(sandboxAskCallback);
            }
            // Initialize platform-specific infrastructure
            let linuxBridge;
            if (getPlatform() === 'linux') {
                linuxBridge = await initializeLinuxNetworkBridge(httpProxyPort, socksProxyPort);
            }
            const context = {
                httpProxyPort,
                socksProxyPort,
                linuxBridge,
            };
            managerContext = context;
            logForDebugging('Network infrastructure initialized');
            return context;
        }
        catch (error) {
            // Clear state on error so initialization can be retried
            initializationPromise = undefined;
            managerContext = undefined;
            // Best-effort cleanup; failures are logged, not rethrown.
            reset().catch(e => {
                logForDebugging(`Cleanup failed in initializationPromise ${e}`, {
                    level: 'error',
                });
            });
            throw error;
        }
    })();
    await initializationPromise;
}
|
||||
/**
 * Whether the current platform can run the sandbox at all:
 * macOS, or Linux excluding WSL1 (no bubblewrap support there).
 */
function isSupportedPlatform() {
    switch (getPlatform()) {
        case 'linux':
            // WSL1 doesn't support bubblewrap
            return getWslVersion() !== '1';
        case 'macos':
            return true;
        default:
            return false;
    }
}
|
||||
/**
 * Sandboxing is considered enabled once a config has been stored; there is
 * no separate on/off flag — initialize() is the only place `config` is set.
 */
function isSandboxingEnabled() {
    const hasConfig = config !== undefined;
    return hasConfig;
}
|
||||
/**
 * Check sandbox dependencies for the current platform
 * @param ripgrepConfig - Ripgrep command to check. If not provided, uses config from initialization or defaults to 'rg'
 * @returns { warnings, errors } - errors mean sandbox cannot run, warnings mean degraded functionality
 */
function checkDependencies(ripgrepConfig) {
    if (!isSupportedPlatform()) {
        return { errors: ['Unsupported platform'], warnings: [] };
    }
    const result = { errors: [], warnings: [] };
    // Resolution order: explicit argument > initialized config > default 'rg'.
    const rgToCheck = ripgrepConfig ?? config?.ripgrep ?? { command: 'rg' };
    if (whichSync(rgToCheck.command) === null) {
        result.errors.push(`ripgrep (${rgToCheck.command}) not found`);
    }
    if (getPlatform() === 'linux') {
        const linuxDeps = checkLinuxDependencies(config?.seccomp);
        result.errors.push(...linuxDeps.errors);
        result.warnings.push(...linuxDeps.warnings);
    }
    return result;
}
|
||||
/**
 * Helper for getFsReadConfig: normalize one list of configured read paths.
 * Strips trailing glob suffixes; on Linux, any path still containing glob
 * characters is expanded to concrete paths (bubblewrap doesn't support
 * globs). `logLabel` ('' or 'allowRead ') only affects the debug message.
 */
function _expandReadPathList(paths, logLabel) {
    const out = [];
    for (const p of paths) {
        const stripped = removeTrailingGlobSuffix(p);
        if (getPlatform() === 'linux' && containsGlobChars(stripped)) {
            // Expand glob to concrete paths on Linux (bubblewrap doesn't support globs)
            const expanded = expandGlobPattern(p);
            logForDebugging(`[Sandbox] Expanded ${logLabel}glob pattern "${p}" to ${expanded.length} paths on Linux`);
            out.push(...expanded);
        }
        else {
            out.push(stripped);
        }
    }
    return out;
}
/**
 * Build the filesystem *read* restriction config from the initialized
 * runtime config. Both lists are normalized by _expandReadPathList.
 *
 * @returns denyOnly: paths denied for reading; allowWithinDeny: carve-outs
 *   re-allowed inside denied regions. Both empty when no config has been
 *   initialized (i.e. no read restrictions).
 */
function getFsReadConfig() {
    if (!config) {
        return { denyOnly: [], allowWithinDeny: [] };
    }
    return {
        denyOnly: _expandReadPathList(config.filesystem.denyRead, ''),
        allowWithinDeny: _expandReadPathList(config.filesystem.allowRead ?? [], 'allowRead '),
    };
}
|
||||
/**
 * Helper for getFsWriteConfig: strip trailing glob suffixes and, on
 * Linux/WSL, drop any path that still contains glob characters
 * (bubblewrap requires concrete paths, unlike macOS profiles).
 */
function _filterWritePathGlobs(paths) {
    return paths
        .map(path => removeTrailingGlobSuffix(path))
        .filter(path => {
        if (getPlatform() === 'linux' && containsGlobChars(path)) {
            logForDebugging(`Skipping glob pattern on Linux/WSL: ${path}`);
            return false;
        }
        return true;
    });
}
/**
 * Build the filesystem *write* restriction config from the initialized
 * runtime config.
 *
 * @returns allowOnly: default system write paths plus configured
 *   allowWrite paths; denyWithinAllow: configured denyWrite paths (which
 *   take precedence over allowOnly). Defaults are returned when no config
 *   has been initialized.
 */
function getFsWriteConfig() {
    if (!config) {
        return { allowOnly: getDefaultWritePaths(), denyWithinAllow: [] };
    }
    const allowPaths = _filterWritePathGlobs(config.filesystem.allowWrite);
    const denyPaths = _filterWritePathGlobs(config.filesystem.denyWrite);
    // Default system write paths are always allowed in addition to the
    // user-configured ones.
    return {
        allowOnly: [...getDefaultWritePaths(), ...allowPaths],
        denyWithinAllow: denyPaths,
    };
}
|
||||
/**
 * Build the host allow/deny lists for network restriction. Empty lists are
 * omitted entirely so consumers can distinguish "no rule" from "empty
 * rule". Returns {} when no config has been initialized.
 */
function getNetworkRestrictionConfig() {
    if (!config) {
        return {};
    }
    const { allowedDomains, deniedDomains } = config.network;
    const restriction = {};
    if (allowedDomains.length > 0) {
        restriction.allowedHosts = allowedDomains;
    }
    if (deniedDomains.length > 0) {
        restriction.deniedHosts = deniedDomains;
    }
    return restriction;
}
|
||||
// macOS only: Unix socket paths explicitly allowed by config.
function getAllowUnixSockets() {
    return config?.network?.allowUnixSockets;
}
// Whether all Unix sockets are allowed (disables socket blocking).
function getAllowAllUnixSockets() {
    return config?.network?.allowAllUnixSockets;
}
// Whether binding to local ports is allowed.
function getAllowLocalBinding() {
    return config?.network?.allowLocalBinding;
}
// Map of command patterns -> paths whose violations should be ignored.
function getIgnoreViolations() {
    return config?.ignoreViolations;
}
// Weaker nested-sandbox mode (for Docker environments).
function getEnableWeakerNestedSandbox() {
    return config?.enableWeakerNestedSandbox;
}
// Weaker network isolation (macOS trustd access for Go TLS verification).
function getEnableWeakerNetworkIsolation() {
    return config?.enableWeakerNetworkIsolation;
}
// Ripgrep invocation config; defaults to plain `rg` on PATH.
function getRipgrepConfig() {
    return config?.ripgrep ?? { command: 'rg' };
}
// Max directory depth for the Linux dangerous-file search (default 3).
function getMandatoryDenySearchDepth() {
    return config?.mandatoryDenySearchDepth ?? 3;
}
// Whether writes to .git/config are permitted (default false).
function getAllowGitConfig() {
    return config?.filesystem?.allowGitConfig ?? false;
}
// Custom seccomp binary paths (Linux only), if configured.
function getSeccompConfig() {
    return config?.seccomp;
}
// HTTP proxy port (local or external); undefined before initialization.
function getProxyPort() {
    return managerContext?.httpProxyPort;
}
// SOCKS proxy port (local or external); undefined before initialization.
function getSocksProxyPort() {
    return managerContext?.socksProxyPort;
}
// HTTP socket path from initializeLinuxNetworkBridge (Linux only).
function getLinuxHttpSocketPath() {
    return managerContext?.linuxBridge?.httpSocketPath;
}
// SOCKS socket path from initializeLinuxNetworkBridge (Linux only).
function getLinuxSocksSocketPath() {
    return managerContext?.linuxBridge?.socksSocketPath;
}
|
||||
/**
 * Wait for network initialization to complete if already in progress
 * Returns true if initialized successfully, false otherwise
 */
async function waitForNetworkInitialization() {
    if (!config) {
        return false;
    }
    if (!initializationPromise) {
        // Initialization was never started (or a failed attempt cleared the
        // promise); fall back to whether a context already exists.
        return managerContext !== undefined;
    }
    try {
        await initializationPromise;
        return true;
    }
    catch {
        return false;
    }
}
|
||||
async function wrapWithSandbox(command, binShell, customConfig, abortSignal) {
|
||||
const platform = getPlatform();
|
||||
// Get configs - use custom if provided, otherwise fall back to main config
|
||||
// If neither exists, defaults to empty arrays (most restrictive)
|
||||
// Always include default system write paths (like /dev/null, /tmp/claude)
|
||||
//
|
||||
// Strip trailing /** and filter remaining globs on Linux (bwrap needs
|
||||
// real paths, not globs; macOS subpath matching is also recursive so
|
||||
// stripping is harmless there).
|
||||
const stripWriteGlobs = (paths) => paths
|
||||
.map(p => removeTrailingGlobSuffix(p))
|
||||
.filter(p => {
|
||||
if (getPlatform() === 'linux' && containsGlobChars(p)) {
|
||||
logForDebugging(`[Sandbox] Skipping glob write pattern on Linux: ${p}`);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
const userAllowWrite = stripWriteGlobs(customConfig?.filesystem?.allowWrite ?? config?.filesystem.allowWrite ?? []);
|
||||
const writeConfig = {
|
||||
allowOnly: [...getDefaultWritePaths(), ...userAllowWrite],
|
||||
denyWithinAllow: stripWriteGlobs(customConfig?.filesystem?.denyWrite ?? config?.filesystem.denyWrite ?? []),
|
||||
};
|
||||
const rawDenyRead = customConfig?.filesystem?.denyRead ?? config?.filesystem.denyRead ?? [];
|
||||
const expandedDenyRead = [];
|
||||
for (const p of rawDenyRead) {
|
||||
const stripped = removeTrailingGlobSuffix(p);
|
||||
if (getPlatform() === 'linux' && containsGlobChars(stripped)) {
|
||||
expandedDenyRead.push(...expandGlobPattern(p));
|
||||
}
|
||||
else {
|
||||
expandedDenyRead.push(stripped);
|
||||
}
|
||||
}
|
||||
const rawAllowRead = customConfig?.filesystem?.allowRead ?? config?.filesystem.allowRead ?? [];
|
||||
const expandedAllowRead = [];
|
||||
for (const p of rawAllowRead) {
|
||||
const stripped = removeTrailingGlobSuffix(p);
|
||||
if (getPlatform() === 'linux' && containsGlobChars(stripped)) {
|
||||
expandedAllowRead.push(...expandGlobPattern(p));
|
||||
}
|
||||
else {
|
||||
expandedAllowRead.push(stripped);
|
||||
}
|
||||
}
|
||||
const readConfig = {
|
||||
denyOnly: expandedDenyRead,
|
||||
allowWithinDeny: expandedAllowRead,
|
||||
};
|
||||
// Check if network config is specified - this determines if we need network restrictions
|
||||
// Network restriction is needed when:
|
||||
// 1. customConfig has network.allowedDomains defined (even if empty array = block all)
|
||||
// 2. OR config has network.allowedDomains defined (even if empty array = block all)
|
||||
// An empty allowedDomains array means "no domains allowed" = block all network access
|
||||
const hasNetworkConfig = customConfig?.network?.allowedDomains !== undefined ||
|
||||
config?.network?.allowedDomains !== undefined;
|
||||
// Network RESTRICTION is needed whenever network config is specified
|
||||
// This includes empty allowedDomains which means "block all network"
|
||||
const needsNetworkRestriction = hasNetworkConfig;
|
||||
// Network PROXY is needed whenever network config is specified
|
||||
// Even with empty allowedDomains, we route through proxy so that:
|
||||
// 1. updateConfig() can enable network access for already-running processes
|
||||
// 2. The proxy blocks all requests when allowlist is empty
|
||||
const needsNetworkProxy = hasNetworkConfig;
|
||||
// Wait for network initialization only if proxy is actually needed
|
||||
if (needsNetworkProxy) {
|
||||
await waitForNetworkInitialization();
|
||||
}
|
||||
// Check custom config to allow pseudo-terminal (can be applied dynamically)
|
||||
const allowPty = customConfig?.allowPty ?? config?.allowPty;
|
||||
switch (platform) {
|
||||
case 'macos':
|
||||
// macOS sandbox profile supports glob patterns directly, no ripgrep needed
|
||||
return wrapCommandWithSandboxMacOS({
|
||||
command,
|
||||
needsNetworkRestriction,
|
||||
// Only pass proxy ports if proxy is running (when there are domains to filter)
|
||||
httpProxyPort: needsNetworkProxy ? getProxyPort() : undefined,
|
||||
socksProxyPort: needsNetworkProxy ? getSocksProxyPort() : undefined,
|
||||
readConfig,
|
||||
writeConfig,
|
||||
allowUnixSockets: getAllowUnixSockets(),
|
||||
allowAllUnixSockets: getAllowAllUnixSockets(),
|
||||
allowLocalBinding: getAllowLocalBinding(),
|
||||
ignoreViolations: getIgnoreViolations(),
|
||||
allowPty,
|
||||
allowGitConfig: getAllowGitConfig(),
|
||||
enableWeakerNetworkIsolation: getEnableWeakerNetworkIsolation(),
|
||||
binShell,
|
||||
});
|
||||
case 'linux':
|
||||
return wrapCommandWithSandboxLinux({
|
||||
command,
|
||||
needsNetworkRestriction,
|
||||
// Only pass socket paths if proxy is running (when there are domains to filter)
|
||||
httpSocketPath: needsNetworkProxy
|
||||
? getLinuxHttpSocketPath()
|
||||
: undefined,
|
||||
socksSocketPath: needsNetworkProxy
|
||||
? getLinuxSocksSocketPath()
|
||||
: undefined,
|
||||
httpProxyPort: needsNetworkProxy
|
||||
? managerContext?.httpProxyPort
|
||||
: undefined,
|
||||
socksProxyPort: needsNetworkProxy
|
||||
? managerContext?.socksProxyPort
|
||||
: undefined,
|
||||
readConfig,
|
||||
writeConfig,
|
||||
enableWeakerNestedSandbox: getEnableWeakerNestedSandbox(),
|
||||
allowAllUnixSockets: getAllowAllUnixSockets(),
|
||||
binShell,
|
||||
ripgrepConfig: getRipgrepConfig(),
|
||||
mandatoryDenySearchDepth: getMandatoryDenySearchDepth(),
|
||||
allowGitConfig: getAllowGitConfig(),
|
||||
seccompConfig: getSeccompConfig(),
|
||||
abortSignal,
|
||||
});
|
||||
default:
|
||||
// Unsupported platform - this should not happen since isSandboxingEnabled() checks platform support
|
||||
throw new Error(`Sandbox configuration is not supported on platform: ${platform}`);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Get the current sandbox configuration
|
||||
* @returns The current configuration, or undefined if not initialized
|
||||
*/
|
||||
function getConfig() {
|
||||
return config;
|
||||
}
|
||||
/**
|
||||
* Update the sandbox configuration
|
||||
* @param newConfig - The new configuration to use
|
||||
*/
|
||||
function updateConfig(newConfig) {
|
||||
// Deep clone the config to avoid mutations
|
||||
config = cloneDeep(newConfig);
|
||||
logForDebugging('Sandbox configuration updated');
|
||||
}
|
||||
/**
|
||||
* Lightweight cleanup to call after each sandboxed command completes.
|
||||
*
|
||||
* On Linux, bwrap creates empty files on the host filesystem as mount points
|
||||
* when protecting non-existent deny paths (e.g. ~/.bashrc, ~/.gitconfig).
|
||||
* These persist after bwrap exits. This function removes them.
|
||||
*
|
||||
* Safe to call on any platform — it's a no-op on macOS.
|
||||
* Also called automatically by reset() and on process exit as safety nets.
|
||||
*/
|
||||
function cleanupAfterCommand() {
|
||||
cleanupBwrapMountPoints();
|
||||
}
|
||||
async function reset() {
|
||||
// Clean up any leftover bwrap mount points
|
||||
cleanupAfterCommand();
|
||||
// Stop log monitor
|
||||
if (logMonitorShutdown) {
|
||||
logMonitorShutdown();
|
||||
logMonitorShutdown = undefined;
|
||||
}
|
||||
if (managerContext?.linuxBridge) {
|
||||
const { httpSocketPath, socksSocketPath, httpBridgeProcess, socksBridgeProcess, } = managerContext.linuxBridge;
|
||||
// Create array to wait for process exits
|
||||
const exitPromises = [];
|
||||
// Kill HTTP bridge and wait for it to exit
|
||||
if (httpBridgeProcess.pid && !httpBridgeProcess.killed) {
|
||||
try {
|
||||
process.kill(httpBridgeProcess.pid, 'SIGTERM');
|
||||
logForDebugging('Sent SIGTERM to HTTP bridge process');
|
||||
// Wait for process to exit
|
||||
exitPromises.push(new Promise(resolve => {
|
||||
httpBridgeProcess.once('exit', () => {
|
||||
logForDebugging('HTTP bridge process exited');
|
||||
resolve();
|
||||
});
|
||||
// Timeout after 5 seconds
|
||||
setTimeout(() => {
|
||||
if (!httpBridgeProcess.killed) {
|
||||
logForDebugging('HTTP bridge did not exit, forcing SIGKILL', {
|
||||
level: 'warn',
|
||||
});
|
||||
try {
|
||||
if (httpBridgeProcess.pid) {
|
||||
process.kill(httpBridgeProcess.pid, 'SIGKILL');
|
||||
}
|
||||
}
|
||||
catch {
|
||||
// Process may have already exited
|
||||
}
|
||||
}
|
||||
resolve();
|
||||
}, 5000);
|
||||
}));
|
||||
}
|
||||
catch (err) {
|
||||
if (err.code !== 'ESRCH') {
|
||||
logForDebugging(`Error killing HTTP bridge: ${err}`, {
|
||||
level: 'error',
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
// Kill SOCKS bridge and wait for it to exit
|
||||
if (socksBridgeProcess.pid && !socksBridgeProcess.killed) {
|
||||
try {
|
||||
process.kill(socksBridgeProcess.pid, 'SIGTERM');
|
||||
logForDebugging('Sent SIGTERM to SOCKS bridge process');
|
||||
// Wait for process to exit
|
||||
exitPromises.push(new Promise(resolve => {
|
||||
socksBridgeProcess.once('exit', () => {
|
||||
logForDebugging('SOCKS bridge process exited');
|
||||
resolve();
|
||||
});
|
||||
// Timeout after 5 seconds
|
||||
setTimeout(() => {
|
||||
if (!socksBridgeProcess.killed) {
|
||||
logForDebugging('SOCKS bridge did not exit, forcing SIGKILL', {
|
||||
level: 'warn',
|
||||
});
|
||||
try {
|
||||
if (socksBridgeProcess.pid) {
|
||||
process.kill(socksBridgeProcess.pid, 'SIGKILL');
|
||||
}
|
||||
}
|
||||
catch {
|
||||
// Process may have already exited
|
||||
}
|
||||
}
|
||||
resolve();
|
||||
}, 5000);
|
||||
}));
|
||||
}
|
||||
catch (err) {
|
||||
if (err.code !== 'ESRCH') {
|
||||
logForDebugging(`Error killing SOCKS bridge: ${err}`, {
|
||||
level: 'error',
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
// Wait for both processes to exit
|
||||
await Promise.all(exitPromises);
|
||||
// Clean up sockets
|
||||
if (httpSocketPath) {
|
||||
try {
|
||||
fs.rmSync(httpSocketPath, { force: true });
|
||||
logForDebugging('Cleaned up HTTP socket');
|
||||
}
|
||||
catch (err) {
|
||||
logForDebugging(`HTTP socket cleanup error: ${err}`, {
|
||||
level: 'error',
|
||||
});
|
||||
}
|
||||
}
|
||||
if (socksSocketPath) {
|
||||
try {
|
||||
fs.rmSync(socksSocketPath, { force: true });
|
||||
logForDebugging('Cleaned up SOCKS socket');
|
||||
}
|
||||
catch (err) {
|
||||
logForDebugging(`SOCKS socket cleanup error: ${err}`, {
|
||||
level: 'error',
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
// Close servers in parallel (only if they exist, i.e., were started by us)
|
||||
const closePromises = [];
|
||||
if (httpProxyServer) {
|
||||
const server = httpProxyServer; // Capture reference to avoid TypeScript error
|
||||
const httpClose = new Promise(resolve => {
|
||||
server.close(error => {
|
||||
if (error && error.message !== 'Server is not running.') {
|
||||
logForDebugging(`Error closing HTTP proxy server: ${error.message}`, {
|
||||
level: 'error',
|
||||
});
|
||||
}
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
closePromises.push(httpClose);
|
||||
}
|
||||
if (socksProxyServer) {
|
||||
const socksClose = socksProxyServer.close().catch((error) => {
|
||||
logForDebugging(`Error closing SOCKS proxy server: ${error.message}`, {
|
||||
level: 'error',
|
||||
});
|
||||
});
|
||||
closePromises.push(socksClose);
|
||||
}
|
||||
// Wait for all servers to close
|
||||
await Promise.all(closePromises);
|
||||
// Clear references
|
||||
httpProxyServer = undefined;
|
||||
socksProxyServer = undefined;
|
||||
managerContext = undefined;
|
||||
initializationPromise = undefined;
|
||||
}
|
||||
function getSandboxViolationStore() {
|
||||
return sandboxViolationStore;
|
||||
}
|
||||
function annotateStderrWithSandboxFailures(command, stderr) {
|
||||
if (!config) {
|
||||
return stderr;
|
||||
}
|
||||
const violations = sandboxViolationStore.getViolationsForCommand(command);
|
||||
if (violations.length === 0) {
|
||||
return stderr;
|
||||
}
|
||||
let annotated = stderr;
|
||||
annotated += EOL + '<sandbox_violations>' + EOL;
|
||||
for (const violation of violations) {
|
||||
annotated += violation.line + EOL;
|
||||
}
|
||||
annotated += '</sandbox_violations>';
|
||||
return annotated;
|
||||
}
|
||||
/**
|
||||
* Returns glob patterns from Edit/Read permission rules that are not
|
||||
* fully supported on Linux. Returns empty array on macOS or when
|
||||
* sandboxing is disabled.
|
||||
*
|
||||
* Patterns ending with /** are excluded since they work as subpaths.
|
||||
*/
|
||||
function getLinuxGlobPatternWarnings() {
|
||||
// Only warn on Linux/WSL (bubblewrap doesn't support globs)
|
||||
// macOS supports glob patterns via regex conversion
|
||||
if (getPlatform() !== 'linux' || !config) {
|
||||
return [];
|
||||
}
|
||||
const globPatterns = [];
|
||||
// Check filesystem paths for glob patterns
|
||||
// Note: denyRead is excluded because globs are now expanded to concrete paths on Linux
|
||||
const allPaths = [
|
||||
...config.filesystem.allowWrite,
|
||||
...config.filesystem.denyWrite,
|
||||
];
|
||||
for (const path of allPaths) {
|
||||
// Strip trailing /** since that's just a subpath (directory and everything under it)
|
||||
const pathWithoutTrailingStar = removeTrailingGlobSuffix(path);
|
||||
// Only warn if there are still glob characters after removing trailing /**
|
||||
if (containsGlobChars(pathWithoutTrailingStar)) {
|
||||
globPatterns.push(path);
|
||||
}
|
||||
}
|
||||
return globPatterns;
|
||||
}
|
||||
// ============================================================================
|
||||
// Export as Namespace with Interface
|
||||
// ============================================================================
|
||||
/**
|
||||
* Global sandbox manager that handles both network and filesystem restrictions
|
||||
* for this session. This runs outside of the sandbox, on the host machine.
|
||||
*/
|
||||
export const SandboxManager = {
|
||||
initialize,
|
||||
isSupportedPlatform,
|
||||
isSandboxingEnabled,
|
||||
checkDependencies,
|
||||
getFsReadConfig,
|
||||
getFsWriteConfig,
|
||||
getNetworkRestrictionConfig,
|
||||
getAllowUnixSockets,
|
||||
getAllowLocalBinding,
|
||||
getIgnoreViolations,
|
||||
getEnableWeakerNestedSandbox,
|
||||
getProxyPort,
|
||||
getSocksProxyPort,
|
||||
getLinuxHttpSocketPath,
|
||||
getLinuxSocksSocketPath,
|
||||
waitForNetworkInitialization,
|
||||
wrapWithSandbox,
|
||||
cleanupAfterCommand,
|
||||
reset,
|
||||
getSandboxViolationStore,
|
||||
annotateStderrWithSandboxFailures,
|
||||
getLinuxGlobPatternWarnings,
|
||||
getConfig,
|
||||
updateConfig,
|
||||
};
|
||||
//# sourceMappingURL=sandbox-manager.js.map
|
||||
435
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/sandbox/sandbox-utils.js
generated
vendored
435
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/sandbox/sandbox-utils.js
generated
vendored
@@ -1,435 +0,0 @@
|
||||
import { homedir } from 'os';
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs';
|
||||
import { getPlatform } from '../utils/platform.js';
|
||||
import { logForDebugging } from '../utils/debug.js';
|
||||
/**
|
||||
* Dangerous files that should be protected from writes.
|
||||
* These files can be used for code execution or data exfiltration.
|
||||
*/
|
||||
export const DANGEROUS_FILES = [
|
||||
'.gitconfig',
|
||||
'.gitmodules',
|
||||
'.bashrc',
|
||||
'.bash_profile',
|
||||
'.zshrc',
|
||||
'.zprofile',
|
||||
'.profile',
|
||||
'.ripgreprc',
|
||||
'.mcp.json',
|
||||
];
|
||||
/**
|
||||
* Dangerous directories that should be protected from writes.
|
||||
* These directories contain sensitive configuration or executable files.
|
||||
*/
|
||||
export const DANGEROUS_DIRECTORIES = ['.git', '.vscode', '.idea'];
|
||||
/**
|
||||
* Get the list of dangerous directories to deny writes to.
|
||||
* Excludes .git since we need it writable for git operations -
|
||||
* instead we block specific paths within .git (hooks and config).
|
||||
*/
|
||||
export function getDangerousDirectories() {
|
||||
return [
|
||||
...DANGEROUS_DIRECTORIES.filter(d => d !== '.git'),
|
||||
'.claude/commands',
|
||||
'.claude/agents',
|
||||
];
|
||||
}
|
||||
/**
|
||||
* Normalizes a path for case-insensitive comparison.
|
||||
* This prevents bypassing security checks using mixed-case paths on case-insensitive
|
||||
* filesystems (macOS/Windows) like `.cLauDe/Settings.locaL.json`.
|
||||
*
|
||||
* We always normalize to lowercase regardless of platform for consistent security.
|
||||
* @param path The path to normalize
|
||||
* @returns The lowercase path for safe comparison
|
||||
*/
|
||||
export function normalizeCaseForComparison(pathStr) {
|
||||
return pathStr.toLowerCase();
|
||||
}
|
||||
/**
|
||||
* Check if a path pattern contains glob characters
|
||||
*/
|
||||
export function containsGlobChars(pathPattern) {
|
||||
return (pathPattern.includes('*') ||
|
||||
pathPattern.includes('?') ||
|
||||
pathPattern.includes('[') ||
|
||||
pathPattern.includes(']'));
|
||||
}
|
||||
/**
|
||||
* Remove trailing /** glob suffix from a path pattern
|
||||
* Used to normalize path patterns since /** just means "directory and everything under it"
|
||||
*/
|
||||
export function removeTrailingGlobSuffix(pathPattern) {
|
||||
const stripped = pathPattern.replace(/\/\*\*$/, '');
|
||||
return stripped || '/';
|
||||
}
|
||||
/**
|
||||
* Check if a symlink resolution crosses expected path boundaries.
|
||||
*
|
||||
* When resolving symlinks for sandbox path normalization, we need to ensure
|
||||
* the resolved path doesn't unexpectedly broaden the scope. This function
|
||||
* returns true if the resolved path is an ancestor of the original path
|
||||
* or resolves to a system root, which would indicate the symlink points
|
||||
* outside expected boundaries.
|
||||
*
|
||||
* @param originalPath - The original path before symlink resolution
|
||||
* @param resolvedPath - The path after fs.realpathSync() resolution
|
||||
* @returns true if the resolved path is outside expected boundaries
|
||||
*/
|
||||
export function isSymlinkOutsideBoundary(originalPath, resolvedPath) {
|
||||
const normalizedOriginal = path.normalize(originalPath);
|
||||
const normalizedResolved = path.normalize(resolvedPath);
|
||||
// Same path after normalization - OK
|
||||
if (normalizedResolved === normalizedOriginal) {
|
||||
return false;
|
||||
}
|
||||
// Handle macOS /tmp -> /private/tmp canonical resolution
|
||||
// This is a legitimate system symlink that should be allowed
|
||||
// /tmp/claude -> /private/tmp/claude is OK
|
||||
// /var/folders/... -> /private/var/folders/... is OK
|
||||
if (normalizedOriginal.startsWith('/tmp/') &&
|
||||
normalizedResolved === '/private' + normalizedOriginal) {
|
||||
return false;
|
||||
}
|
||||
if (normalizedOriginal.startsWith('/var/') &&
|
||||
normalizedResolved === '/private' + normalizedOriginal) {
|
||||
return false;
|
||||
}
|
||||
// Also handle the reverse: /private/tmp/... resolving to itself
|
||||
if (normalizedOriginal.startsWith('/private/tmp/') &&
|
||||
normalizedResolved === normalizedOriginal) {
|
||||
return false;
|
||||
}
|
||||
if (normalizedOriginal.startsWith('/private/var/') &&
|
||||
normalizedResolved === normalizedOriginal) {
|
||||
return false;
|
||||
}
|
||||
// If resolved path is "/" it's outside expected boundaries
|
||||
if (normalizedResolved === '/') {
|
||||
return true;
|
||||
}
|
||||
// If resolved path is very short (single component like /tmp, /usr, /var),
|
||||
// it's likely outside expected boundaries
|
||||
const resolvedParts = normalizedResolved.split('/').filter(Boolean);
|
||||
if (resolvedParts.length <= 1) {
|
||||
return true;
|
||||
}
|
||||
// If original path starts with resolved path, the resolved path is an ancestor
|
||||
// e.g., /tmp/claude -> /tmp means the symlink points to a broader scope
|
||||
if (normalizedOriginal.startsWith(normalizedResolved + '/')) {
|
||||
return true;
|
||||
}
|
||||
// Also check the canonical form of the original path for macOS
|
||||
// e.g., /tmp/claude should also be checked as /private/tmp/claude
|
||||
let canonicalOriginal = normalizedOriginal;
|
||||
if (normalizedOriginal.startsWith('/tmp/')) {
|
||||
canonicalOriginal = '/private' + normalizedOriginal;
|
||||
}
|
||||
else if (normalizedOriginal.startsWith('/var/')) {
|
||||
canonicalOriginal = '/private' + normalizedOriginal;
|
||||
}
|
||||
if (canonicalOriginal !== normalizedOriginal &&
|
||||
canonicalOriginal.startsWith(normalizedResolved + '/')) {
|
||||
return true;
|
||||
}
|
||||
// STRICT CHECK: Only allow resolutions that stay within the expected path tree
|
||||
// The resolved path must either:
|
||||
// 1. Start with the original path (deeper/same) - already covered by returning false below
|
||||
// 2. Start with the canonical original (deeper/same under canonical form)
|
||||
// 3. BE the canonical form of the original (e.g., /tmp/x -> /private/tmp/x)
|
||||
// Any other resolution (e.g., /tmp/claude -> /Users/dworken) is outside expected bounds
|
||||
const resolvedStartsWithOriginal = normalizedResolved.startsWith(normalizedOriginal + '/');
|
||||
const resolvedStartsWithCanonical = canonicalOriginal !== normalizedOriginal &&
|
||||
normalizedResolved.startsWith(canonicalOriginal + '/');
|
||||
const resolvedIsCanonical = canonicalOriginal !== normalizedOriginal &&
|
||||
normalizedResolved === canonicalOriginal;
|
||||
const resolvedIsSame = normalizedResolved === normalizedOriginal;
|
||||
// If resolved path is not within expected tree, it's outside boundary
|
||||
if (!resolvedIsSame &&
|
||||
!resolvedIsCanonical &&
|
||||
!resolvedStartsWithOriginal &&
|
||||
!resolvedStartsWithCanonical) {
|
||||
return true;
|
||||
}
|
||||
// Allow resolution to same directory level or deeper within expected tree
|
||||
return false;
|
||||
}
|
||||
/**
|
||||
* Normalize a path for use in sandbox configurations
|
||||
* Handles:
|
||||
* - Tilde (~) expansion for home directory
|
||||
* - Relative paths (./foo, ../foo, etc.) converted to absolute
|
||||
* - Absolute paths remain unchanged
|
||||
* - Symlinks are resolved to their real paths for non-glob patterns
|
||||
* - Glob patterns preserve wildcards after path normalization
|
||||
*
|
||||
* Returns the absolute path with symlinks resolved (or normalized glob pattern)
|
||||
*/
|
||||
export function normalizePathForSandbox(pathPattern) {
|
||||
const cwd = process.cwd();
|
||||
let normalizedPath = pathPattern;
|
||||
// Expand ~ to home directory
|
||||
if (pathPattern === '~') {
|
||||
normalizedPath = homedir();
|
||||
}
|
||||
else if (pathPattern.startsWith('~/')) {
|
||||
normalizedPath = homedir() + pathPattern.slice(1);
|
||||
}
|
||||
else if (pathPattern.startsWith('./') || pathPattern.startsWith('../')) {
|
||||
// Convert relative to absolute based on current working directory
|
||||
normalizedPath = path.resolve(cwd, pathPattern);
|
||||
}
|
||||
else if (!path.isAbsolute(pathPattern)) {
|
||||
// Handle other relative paths (e.g., ".", "..", "foo/bar")
|
||||
normalizedPath = path.resolve(cwd, pathPattern);
|
||||
}
|
||||
// For glob patterns, resolve symlinks for the directory portion only
|
||||
if (containsGlobChars(normalizedPath)) {
|
||||
// Extract the static directory prefix before glob characters
|
||||
const staticPrefix = normalizedPath.split(/[*?[\]]/)[0];
|
||||
if (staticPrefix && staticPrefix !== '/') {
|
||||
// Get the directory containing the glob pattern
|
||||
// If staticPrefix ends with /, remove it to get the directory
|
||||
const baseDir = staticPrefix.endsWith('/')
|
||||
? staticPrefix.slice(0, -1)
|
||||
: path.dirname(staticPrefix);
|
||||
// Try to resolve symlinks for the base directory
|
||||
try {
|
||||
const resolvedBaseDir = fs.realpathSync(baseDir);
|
||||
// Validate that resolution stays within expected boundaries
|
||||
if (!isSymlinkOutsideBoundary(baseDir, resolvedBaseDir)) {
|
||||
// Reconstruct the pattern with the resolved directory
|
||||
const patternSuffix = normalizedPath.slice(baseDir.length);
|
||||
return resolvedBaseDir + patternSuffix;
|
||||
}
|
||||
// If resolution would broaden scope, keep original pattern
|
||||
}
|
||||
catch {
|
||||
// If directory doesn't exist or can't be resolved, keep the original pattern
|
||||
}
|
||||
}
|
||||
return normalizedPath;
|
||||
}
|
||||
// Resolve symlinks to real paths to avoid bwrap issues
|
||||
// Validate that the resolution stays within expected boundaries
|
||||
try {
|
||||
const resolvedPath = fs.realpathSync(normalizedPath);
|
||||
// Only use resolved path if it doesn't cross boundary (e.g., symlink to parent dir)
|
||||
if (isSymlinkOutsideBoundary(normalizedPath, resolvedPath)) {
|
||||
// Symlink points outside expected boundaries - keep original path
|
||||
}
|
||||
else {
|
||||
normalizedPath = resolvedPath;
|
||||
}
|
||||
}
|
||||
catch {
|
||||
// If path doesn't exist or can't be resolved, keep the normalized path
|
||||
}
|
||||
return normalizedPath;
|
||||
}
|
||||
/**
|
||||
* Get recommended system paths that should be writable for commands to work properly
|
||||
*
|
||||
* WARNING: These default paths are intentionally broad for compatibility but may
|
||||
* allow access to files from other processes. In highly security-sensitive
|
||||
* environments, you should configure more restrictive write paths.
|
||||
*/
|
||||
export function getDefaultWritePaths() {
|
||||
const homeDir = homedir();
|
||||
const recommendedPaths = [
|
||||
'/dev/stdout',
|
||||
'/dev/stderr',
|
||||
'/dev/null',
|
||||
'/dev/tty',
|
||||
'/dev/dtracehelper',
|
||||
'/dev/autofs_nowait',
|
||||
'/tmp/claude',
|
||||
'/private/tmp/claude',
|
||||
path.join(homeDir, '.npm/_logs'),
|
||||
path.join(homeDir, '.claude/debug'),
|
||||
];
|
||||
return recommendedPaths;
|
||||
}
|
||||
/**
|
||||
* Generate proxy environment variables for sandboxed processes
|
||||
*/
|
||||
export function generateProxyEnvVars(httpProxyPort, socksProxyPort) {
|
||||
// Respect CLAUDE_TMPDIR if set, otherwise default to /tmp/claude
|
||||
const tmpdir = process.env.CLAUDE_TMPDIR || '/tmp/claude';
|
||||
const envVars = [`SANDBOX_RUNTIME=1`, `TMPDIR=${tmpdir}`];
|
||||
// If no proxy ports provided, return minimal env vars
|
||||
if (!httpProxyPort && !socksProxyPort) {
|
||||
return envVars;
|
||||
}
|
||||
// Always set NO_PROXY to exclude localhost and private networks from proxying
|
||||
const noProxyAddresses = [
|
||||
'localhost',
|
||||
'127.0.0.1',
|
||||
'::1',
|
||||
'*.local',
|
||||
'.local',
|
||||
'169.254.0.0/16', // Link-local
|
||||
'10.0.0.0/8', // Private network
|
||||
'172.16.0.0/12', // Private network
|
||||
'192.168.0.0/16', // Private network
|
||||
].join(',');
|
||||
envVars.push(`NO_PROXY=${noProxyAddresses}`);
|
||||
envVars.push(`no_proxy=${noProxyAddresses}`);
|
||||
if (httpProxyPort) {
|
||||
envVars.push(`HTTP_PROXY=http://localhost:${httpProxyPort}`);
|
||||
envVars.push(`HTTPS_PROXY=http://localhost:${httpProxyPort}`);
|
||||
// Lowercase versions for compatibility with some tools
|
||||
envVars.push(`http_proxy=http://localhost:${httpProxyPort}`);
|
||||
envVars.push(`https_proxy=http://localhost:${httpProxyPort}`);
|
||||
}
|
||||
if (socksProxyPort) {
|
||||
// Use socks5h:// for proper DNS resolution through proxy
|
||||
envVars.push(`ALL_PROXY=socks5h://localhost:${socksProxyPort}`);
|
||||
envVars.push(`all_proxy=socks5h://localhost:${socksProxyPort}`);
|
||||
// Configure Git to use SSH through the proxy so DNS resolution happens outside the sandbox
|
||||
const platform = getPlatform();
|
||||
if (platform === 'macos') {
|
||||
// macOS: use BSD nc SOCKS5 proxy support (-X 5 -x)
|
||||
envVars.push(`GIT_SSH_COMMAND=ssh -o ProxyCommand='nc -X 5 -x localhost:${socksProxyPort} %h %p'`);
|
||||
}
|
||||
else if (platform === 'linux' && httpProxyPort) {
|
||||
// Linux: use socat HTTP CONNECT via the HTTP proxy bridge.
|
||||
// socat is already a required Linux sandbox dependency, and PROXY: is
|
||||
// portable across all socat versions (unlike SOCKS5-CONNECT which needs >= 1.8.0).
|
||||
envVars.push(`GIT_SSH_COMMAND=ssh -o ProxyCommand='socat - PROXY:localhost:%h:%p,proxyport=${httpProxyPort}'`);
|
||||
}
|
||||
// FTP proxy support (use socks5h for DNS resolution through proxy)
|
||||
envVars.push(`FTP_PROXY=socks5h://localhost:${socksProxyPort}`);
|
||||
envVars.push(`ftp_proxy=socks5h://localhost:${socksProxyPort}`);
|
||||
// rsync proxy support
|
||||
envVars.push(`RSYNC_PROXY=localhost:${socksProxyPort}`);
|
||||
// Database tools NOTE: Most database clients don't have built-in proxy support
|
||||
// You typically need to use SSH tunneling or a SOCKS wrapper like tsocks/proxychains
|
||||
// Docker CLI uses HTTP for the API
|
||||
// This makes Docker use the HTTP proxy for registry operations
|
||||
envVars.push(`DOCKER_HTTP_PROXY=http://localhost:${httpProxyPort || socksProxyPort}`);
|
||||
envVars.push(`DOCKER_HTTPS_PROXY=http://localhost:${httpProxyPort || socksProxyPort}`);
|
||||
// Kubernetes kubectl - uses standard HTTPS_PROXY
|
||||
// kubectl respects HTTPS_PROXY which we already set above
|
||||
// AWS CLI - uses standard HTTPS_PROXY (v2 supports it well)
|
||||
// AWS CLI v2 respects HTTPS_PROXY which we already set above
|
||||
// Google Cloud SDK - has specific proxy settings
|
||||
// Use HTTPS proxy to match other HTTP-based tools
|
||||
if (httpProxyPort) {
|
||||
envVars.push(`CLOUDSDK_PROXY_TYPE=https`);
|
||||
envVars.push(`CLOUDSDK_PROXY_ADDRESS=localhost`);
|
||||
envVars.push(`CLOUDSDK_PROXY_PORT=${httpProxyPort}`);
|
||||
}
|
||||
// Azure CLI - uses HTTPS_PROXY
|
||||
// Azure CLI respects HTTPS_PROXY which we already set above
|
||||
// Terraform - uses standard HTTP/HTTPS proxy vars
|
||||
// Terraform respects HTTP_PROXY/HTTPS_PROXY which we already set above
|
||||
// gRPC-based tools - use standard proxy vars
|
||||
envVars.push(`GRPC_PROXY=socks5h://localhost:${socksProxyPort}`);
|
||||
envVars.push(`grpc_proxy=socks5h://localhost:${socksProxyPort}`);
|
||||
}
|
||||
// WARNING: Do not set HTTP_PROXY/HTTPS_PROXY to SOCKS URLs when only SOCKS proxy is available
|
||||
// Most HTTP clients do not support SOCKS URLs in these variables and will fail, and we want
|
||||
// to avoid overriding the client otherwise respecting the ALL_PROXY env var which points to SOCKS.
|
||||
return envVars;
|
||||
}
|
||||
/**
|
||||
* Encode a command for sandbox monitoring
|
||||
* Truncates to 100 chars and base64 encodes to avoid parsing issues
|
||||
*/
|
||||
export function encodeSandboxedCommand(command) {
|
||||
const truncatedCommand = command.slice(0, 100);
|
||||
return Buffer.from(truncatedCommand).toString('base64');
|
||||
}
|
||||
/**
|
||||
* Decode a base64-encoded command from sandbox monitoring
|
||||
*/
|
||||
export function decodeSandboxedCommand(encodedCommand) {
|
||||
return Buffer.from(encodedCommand, 'base64').toString('utf8');
|
||||
}
|
||||
/**
|
||||
* Convert a glob pattern to a regular expression
|
||||
*
|
||||
* This implements gitignore-style pattern matching to match the behavior of the
|
||||
* `ignore` library used by the permission system.
|
||||
*
|
||||
* Supported patterns:
|
||||
* - * matches any characters except / (e.g., *.ts matches foo.ts but not foo/bar.ts)
|
||||
* - ** matches any characters including / (e.g., src/**\/*.ts matches all .ts files in src/)
|
||||
* - ? matches any single character except / (e.g., file?.txt matches file1.txt)
|
||||
* - [abc] matches any character in the set (e.g., file[0-9].txt matches file3.txt)
|
||||
*
|
||||
* Exported for testing and shared between macOS sandbox profiles and Linux glob expansion.
|
||||
*/
|
||||
export function globToRegex(globPattern) {
|
||||
return ('^' +
|
||||
globPattern
|
||||
// Escape regex special characters (except glob chars * ? [ ])
|
||||
.replace(/[.^$+{}()|\\]/g, '\\$&')
|
||||
// Escape unclosed brackets (no matching ])
|
||||
.replace(/\[([^\]]*?)$/g, '\\[$1')
|
||||
// Convert glob patterns to regex (order matters - ** before *)
|
||||
.replace(/\*\*\//g, '__GLOBSTAR_SLASH__') // Placeholder for **/
|
||||
.replace(/\*\*/g, '__GLOBSTAR__') // Placeholder for **
|
||||
.replace(/\*/g, '[^/]*') // * matches anything except /
|
||||
.replace(/\?/g, '[^/]') // ? matches single character except /
|
||||
// Restore placeholders
|
||||
.replace(/__GLOBSTAR_SLASH__/g, '(.*/)?') // **/ matches zero or more dirs
|
||||
.replace(/__GLOBSTAR__/g, '.*') + // ** matches anything including /
|
||||
'$');
|
||||
}
|
||||
/**
|
||||
* Expand a glob pattern into concrete file paths.
|
||||
*
|
||||
* Used on Linux where bubblewrap doesn't support glob patterns natively.
|
||||
* Resolves the static directory prefix, lists files recursively, and filters
|
||||
* using globToRegex().
|
||||
*
|
||||
* @param globPath - A path pattern containing glob characters (e.g., ~/test/*.env)
|
||||
* @returns Array of absolute paths matching the glob pattern
|
||||
*/
|
||||
export function expandGlobPattern(globPath) {
|
||||
const normalizedPattern = normalizePathForSandbox(globPath);
|
||||
// Extract the static directory prefix before any glob characters
|
||||
const staticPrefix = normalizedPattern.split(/[*?[\]]/)[0];
|
||||
if (!staticPrefix || staticPrefix === '/') {
|
||||
logForDebugging(`[Sandbox] Glob pattern too broad, skipping: ${globPath}`);
|
||||
return [];
|
||||
}
|
||||
// Get the base directory from the static prefix
|
||||
const baseDir = staticPrefix.endsWith('/')
|
||||
? staticPrefix.slice(0, -1)
|
||||
: path.dirname(staticPrefix);
|
||||
if (!fs.existsSync(baseDir)) {
|
||||
logForDebugging(`[Sandbox] Base directory for glob does not exist: ${baseDir}`);
|
||||
return [];
|
||||
}
|
||||
// Build regex from the normalized glob pattern
|
||||
const regex = new RegExp(globToRegex(normalizedPattern));
|
||||
// List all entries recursively under the base directory
|
||||
const results = [];
|
||||
try {
|
||||
const entries = fs.readdirSync(baseDir, {
|
||||
recursive: true,
|
||||
withFileTypes: true,
|
||||
});
|
||||
for (const entry of entries) {
|
||||
// Build the full path for this entry
|
||||
// entry.parentPath is the directory containing this entry (available in Node 20+/Bun)
|
||||
// For compatibility, fall back to entry.path if parentPath is not available
|
||||
const parentDir = entry.parentPath ??
|
||||
entry.path ??
|
||||
baseDir;
|
||||
const fullPath = path.join(parentDir, entry.name);
|
||||
if (regex.test(fullPath)) {
|
||||
results.push(fullPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (err) {
|
||||
logForDebugging(`[Sandbox] Error expanding glob pattern ${globPath}: ${err}`);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
//# sourceMappingURL=sandbox-utils.js.map
|
||||
@@ -1,54 +0,0 @@
|
||||
import { encodeSandboxedCommand } from './sandbox-utils.js';
|
||||
/**
|
||||
* In-memory tail for sandbox violations
|
||||
*/
|
||||
export class SandboxViolationStore {
|
||||
constructor() {
|
||||
this.violations = [];
|
||||
this.totalCount = 0;
|
||||
this.maxSize = 100;
|
||||
this.listeners = new Set();
|
||||
}
|
||||
addViolation(violation) {
|
||||
this.violations.push(violation);
|
||||
this.totalCount++;
|
||||
if (this.violations.length > this.maxSize) {
|
||||
this.violations = this.violations.slice(-this.maxSize);
|
||||
}
|
||||
this.notifyListeners();
|
||||
}
|
||||
getViolations(limit) {
|
||||
if (limit === undefined) {
|
||||
return [...this.violations];
|
||||
}
|
||||
return this.violations.slice(-limit);
|
||||
}
|
||||
getCount() {
|
||||
return this.violations.length;
|
||||
}
|
||||
getTotalCount() {
|
||||
return this.totalCount;
|
||||
}
|
||||
getViolationsForCommand(command) {
|
||||
const commandBase64 = encodeSandboxedCommand(command);
|
||||
return this.violations.filter(v => v.encodedCommand === commandBase64);
|
||||
}
|
||||
clear() {
|
||||
this.violations = [];
|
||||
// Don't reset totalCount when clearing
|
||||
this.notifyListeners();
|
||||
}
|
||||
subscribe(listener) {
|
||||
this.listeners.add(listener);
|
||||
listener(this.getViolations());
|
||||
return () => {
|
||||
this.listeners.delete(listener);
|
||||
};
|
||||
}
|
||||
notifyListeners() {
|
||||
// Always notify with all violations so listeners can track the full count
|
||||
const violations = this.getViolations();
|
||||
this.listeners.forEach(listener => listener(violations));
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=sandbox-violation-store.js.map
|
||||
95
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/sandbox/socks-proxy.js
generated
vendored
95
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/sandbox/socks-proxy.js
generated
vendored
@@ -1,95 +0,0 @@
|
||||
import { createServer } from '@pondwader/socks5-server';
|
||||
import { logForDebugging } from '../utils/debug.js';
|
||||
export function createSocksProxyServer(options) {
|
||||
const socksServer = createServer();
|
||||
socksServer.setRulesetValidator(async (conn) => {
|
||||
try {
|
||||
const hostname = conn.destAddress;
|
||||
const port = conn.destPort;
|
||||
logForDebugging(`Connection request to ${hostname}:${port}`);
|
||||
const allowed = await options.filter(port, hostname);
|
||||
if (!allowed) {
|
||||
logForDebugging(`Connection blocked to ${hostname}:${port}`, {
|
||||
level: 'error',
|
||||
});
|
||||
return false;
|
||||
}
|
||||
logForDebugging(`Connection allowed to ${hostname}:${port}`);
|
||||
return true;
|
||||
}
|
||||
catch (error) {
|
||||
logForDebugging(`Error validating connection: ${error}`, {
|
||||
level: 'error',
|
||||
});
|
||||
return false;
|
||||
}
|
||||
});
|
||||
return {
|
||||
server: socksServer,
|
||||
getPort() {
|
||||
// Access the internal server to get the port
|
||||
// We need to use type assertion here as the server property is private
|
||||
try {
|
||||
const serverInternal = socksServer?.server;
|
||||
if (serverInternal && typeof serverInternal?.address === 'function') {
|
||||
const address = serverInternal.address();
|
||||
if (address && typeof address === 'object' && 'port' in address) {
|
||||
return address.port;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
// Server might not be listening yet or property access failed
|
||||
logForDebugging(`Error getting port: ${error}`, { level: 'error' });
|
||||
}
|
||||
return undefined;
|
||||
},
|
||||
listen(port, hostname) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const listeningCallback = () => {
|
||||
const actualPort = this.getPort();
|
||||
if (actualPort) {
|
||||
logForDebugging(`SOCKS proxy listening on ${hostname}:${actualPort}`);
|
||||
resolve(actualPort);
|
||||
}
|
||||
else {
|
||||
reject(new Error('Failed to get SOCKS proxy server port'));
|
||||
}
|
||||
};
|
||||
socksServer.listen(port, hostname, listeningCallback);
|
||||
});
|
||||
},
|
||||
async close() {
|
||||
return new Promise((resolve, reject) => {
|
||||
socksServer.close(error => {
|
||||
if (error) {
|
||||
// Only reject for actual errors, not for "already closed" states
|
||||
// Check for common "already closed" error patterns
|
||||
const errorMessage = error.message?.toLowerCase() || '';
|
||||
const isAlreadyClosed = errorMessage.includes('not running') ||
|
||||
errorMessage.includes('already closed') ||
|
||||
errorMessage.includes('not listening');
|
||||
if (!isAlreadyClosed) {
|
||||
reject(error);
|
||||
return;
|
||||
}
|
||||
}
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
},
|
||||
unref() {
|
||||
// Access the internal server to call unref
|
||||
try {
|
||||
const serverInternal = socksServer?.server;
|
||||
if (serverInternal && typeof serverInternal?.unref === 'function') {
|
||||
serverInternal.unref();
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
logForDebugging(`Error calling unref: ${error}`, { level: 'error' });
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
//# sourceMappingURL=socks-proxy.js.map
|
||||
25
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/utils/debug.js
generated
vendored
25
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/utils/debug.js
generated
vendored
@@ -1,25 +0,0 @@
|
||||
/**
|
||||
* Simple debug logging for standalone sandbox
|
||||
*/
|
||||
export function logForDebugging(message, options) {
|
||||
// Only log if SRT_DEBUG environment variable is set
|
||||
// Using SRT_DEBUG instead of DEBUG to avoid conflicts with other tools
|
||||
// (DEBUG is commonly used by Node.js debug libraries and VS Code)
|
||||
if (!process.env.SRT_DEBUG) {
|
||||
return;
|
||||
}
|
||||
const level = options?.level || 'info';
|
||||
const prefix = '[SandboxDebug]';
|
||||
// Always use stderr to avoid corrupting stdout JSON streams
|
||||
switch (level) {
|
||||
case 'error':
|
||||
console.error(`${prefix} ${message}`);
|
||||
break;
|
||||
case 'warn':
|
||||
console.warn(`${prefix} ${message}`);
|
||||
break;
|
||||
default:
|
||||
console.error(`${prefix} ${message}`);
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=debug.js.map
|
||||
49
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/utils/platform.js
generated
vendored
49
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/utils/platform.js
generated
vendored
@@ -1,49 +0,0 @@
|
||||
/**
|
||||
* Platform detection utilities
|
||||
*/
|
||||
import * as fs from 'fs';
|
||||
/**
|
||||
* Get the WSL version (1 or 2+) if running in WSL.
|
||||
* Returns undefined if not running in WSL.
|
||||
*/
|
||||
export function getWslVersion() {
|
||||
if (process.platform !== 'linux') {
|
||||
return undefined;
|
||||
}
|
||||
try {
|
||||
const procVersion = fs.readFileSync('/proc/version', { encoding: 'utf8' });
|
||||
// Check for explicit WSL version markers (e.g., "WSL2", "WSL3", etc.)
|
||||
const wslVersionMatch = procVersion.match(/WSL(\d+)/i);
|
||||
if (wslVersionMatch && wslVersionMatch[1]) {
|
||||
return wslVersionMatch[1];
|
||||
}
|
||||
// If no explicit WSL version but contains Microsoft, assume WSL1
|
||||
// This handles the original WSL1 format: "4.4.0-19041-Microsoft"
|
||||
if (procVersion.toLowerCase().includes('microsoft')) {
|
||||
return '1';
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
catch {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Detect the current platform.
|
||||
* Note: All Linux including WSL returns 'linux'. Use getWslVersion() to detect WSL1 (unsupported).
|
||||
*/
|
||||
export function getPlatform() {
|
||||
switch (process.platform) {
|
||||
case 'darwin':
|
||||
return 'macos';
|
||||
case 'linux':
|
||||
// WSL2+ is treated as Linux (same sandboxing)
|
||||
// WSL1 is also returned as 'linux' but will fail isSupportedPlatform check
|
||||
return 'linux';
|
||||
case 'win32':
|
||||
return 'windows';
|
||||
default:
|
||||
return 'unknown';
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=platform.js.map
|
||||
45
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/utils/ripgrep.js
generated
vendored
45
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/utils/ripgrep.js
generated
vendored
@@ -1,45 +0,0 @@
|
||||
import { spawn } from 'child_process';
|
||||
import { text } from 'node:stream/consumers';
|
||||
import { whichSync } from './which.js';
|
||||
/**
|
||||
* Check if ripgrep (rg) is available synchronously
|
||||
* Returns true if rg is installed, false otherwise
|
||||
*/
|
||||
export function hasRipgrepSync() {
|
||||
return whichSync('rg') !== null;
|
||||
}
|
||||
/**
|
||||
* Execute ripgrep with the given arguments
|
||||
* @param args Command-line arguments to pass to rg
|
||||
* @param target Target directory or file to search
|
||||
* @param abortSignal AbortSignal to cancel the operation
|
||||
* @param config Ripgrep configuration (command and optional args)
|
||||
* @returns Array of matching lines (one per line of output)
|
||||
* @throws Error if ripgrep exits with non-zero status (except exit code 1 which means no matches)
|
||||
*/
|
||||
export async function ripGrep(args, target, abortSignal, config = { command: 'rg' }) {
|
||||
const { command, args: commandArgs = [], argv0 } = config;
|
||||
const child = spawn(command, [...commandArgs, ...args, target], {
|
||||
argv0,
|
||||
signal: abortSignal,
|
||||
timeout: 10000,
|
||||
windowsHide: true,
|
||||
});
|
||||
const [stdout, stderr, code] = await Promise.all([
|
||||
text(child.stdout),
|
||||
text(child.stderr),
|
||||
new Promise((resolve, reject) => {
|
||||
child.on('close', resolve);
|
||||
child.on('error', reject);
|
||||
}),
|
||||
]);
|
||||
if (code === 0) {
|
||||
return stdout.trim().split('\n').filter(Boolean);
|
||||
}
|
||||
if (code === 1) {
|
||||
// Exit code 1 means "no matches found" - this is normal
|
||||
return [];
|
||||
}
|
||||
throw new Error(`ripgrep failed with exit code ${code}: ${stderr}`);
|
||||
}
|
||||
//# sourceMappingURL=ripgrep.js.map
|
||||
25
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/utils/which.js
generated
vendored
25
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/utils/which.js
generated
vendored
@@ -1,25 +0,0 @@
|
||||
import { spawnSync } from 'node:child_process';
|
||||
/**
|
||||
* Find the path to an executable, similar to the `which` command.
|
||||
* Uses Bun.which when running in Bun, falls back to spawnSync for Node.js.
|
||||
*
|
||||
* @param bin - The name of the executable to find
|
||||
* @returns The full path to the executable, or null if not found
|
||||
*/
|
||||
export function whichSync(bin) {
|
||||
// Check if we're running in Bun
|
||||
if (typeof globalThis.Bun !== 'undefined') {
|
||||
return globalThis.Bun.which(bin);
|
||||
}
|
||||
// Fallback to Node.js implementation
|
||||
const result = spawnSync('which', [bin], {
|
||||
encoding: 'utf8',
|
||||
stdio: ['ignore', 'pipe', 'ignore'],
|
||||
timeout: 1000,
|
||||
});
|
||||
if (result.status === 0 && result.stdout) {
|
||||
return result.stdout.trim();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
//# sourceMappingURL=which.js.map
|
||||
223
extracted-source/node_modules/@anthropic-ai/sdk/_vendor/partial-json-parser/parser.mjs
generated
vendored
223
extracted-source/node_modules/@anthropic-ai/sdk/_vendor/partial-json-parser/parser.mjs
generated
vendored
@@ -1,223 +0,0 @@
|
||||
const tokenize = (input) => {
|
||||
let current = 0;
|
||||
let tokens = [];
|
||||
while (current < input.length) {
|
||||
let char = input[current];
|
||||
if (char === '\\') {
|
||||
current++;
|
||||
continue;
|
||||
}
|
||||
if (char === '{') {
|
||||
tokens.push({
|
||||
type: 'brace',
|
||||
value: '{',
|
||||
});
|
||||
current++;
|
||||
continue;
|
||||
}
|
||||
if (char === '}') {
|
||||
tokens.push({
|
||||
type: 'brace',
|
||||
value: '}',
|
||||
});
|
||||
current++;
|
||||
continue;
|
||||
}
|
||||
if (char === '[') {
|
||||
tokens.push({
|
||||
type: 'paren',
|
||||
value: '[',
|
||||
});
|
||||
current++;
|
||||
continue;
|
||||
}
|
||||
if (char === ']') {
|
||||
tokens.push({
|
||||
type: 'paren',
|
||||
value: ']',
|
||||
});
|
||||
current++;
|
||||
continue;
|
||||
}
|
||||
if (char === ':') {
|
||||
tokens.push({
|
||||
type: 'separator',
|
||||
value: ':',
|
||||
});
|
||||
current++;
|
||||
continue;
|
||||
}
|
||||
if (char === ',') {
|
||||
tokens.push({
|
||||
type: 'delimiter',
|
||||
value: ',',
|
||||
});
|
||||
current++;
|
||||
continue;
|
||||
}
|
||||
if (char === '"') {
|
||||
let value = '';
|
||||
let danglingQuote = false;
|
||||
char = input[++current];
|
||||
while (char !== '"') {
|
||||
if (current === input.length) {
|
||||
danglingQuote = true;
|
||||
break;
|
||||
}
|
||||
if (char === '\\') {
|
||||
current++;
|
||||
if (current === input.length) {
|
||||
danglingQuote = true;
|
||||
break;
|
||||
}
|
||||
value += char + input[current];
|
||||
char = input[++current];
|
||||
}
|
||||
else {
|
||||
value += char;
|
||||
char = input[++current];
|
||||
}
|
||||
}
|
||||
char = input[++current];
|
||||
if (!danglingQuote) {
|
||||
tokens.push({
|
||||
type: 'string',
|
||||
value,
|
||||
});
|
||||
}
|
||||
continue;
|
||||
}
|
||||
let WHITESPACE = /\s/;
|
||||
if (char && WHITESPACE.test(char)) {
|
||||
current++;
|
||||
continue;
|
||||
}
|
||||
let NUMBERS = /[0-9]/;
|
||||
if ((char && NUMBERS.test(char)) || char === '-' || char === '.') {
|
||||
let value = '';
|
||||
if (char === '-') {
|
||||
value += char;
|
||||
char = input[++current];
|
||||
}
|
||||
while ((char && NUMBERS.test(char)) || char === '.') {
|
||||
value += char;
|
||||
char = input[++current];
|
||||
}
|
||||
tokens.push({
|
||||
type: 'number',
|
||||
value,
|
||||
});
|
||||
continue;
|
||||
}
|
||||
let LETTERS = /[a-z]/i;
|
||||
if (char && LETTERS.test(char)) {
|
||||
let value = '';
|
||||
while (char && LETTERS.test(char)) {
|
||||
if (current === input.length) {
|
||||
break;
|
||||
}
|
||||
value += char;
|
||||
char = input[++current];
|
||||
}
|
||||
if (value == 'true' || value == 'false' || value === 'null') {
|
||||
tokens.push({
|
||||
type: 'name',
|
||||
value,
|
||||
});
|
||||
}
|
||||
else {
|
||||
// unknown token, e.g. `nul` which isn't quite `null`
|
||||
current++;
|
||||
continue;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
current++;
|
||||
}
|
||||
return tokens;
|
||||
}, strip = (tokens) => {
|
||||
if (tokens.length === 0) {
|
||||
return tokens;
|
||||
}
|
||||
let lastToken = tokens[tokens.length - 1];
|
||||
switch (lastToken.type) {
|
||||
case 'separator':
|
||||
tokens = tokens.slice(0, tokens.length - 1);
|
||||
return strip(tokens);
|
||||
break;
|
||||
case 'number':
|
||||
let lastCharacterOfLastToken = lastToken.value[lastToken.value.length - 1];
|
||||
if (lastCharacterOfLastToken === '.' || lastCharacterOfLastToken === '-') {
|
||||
tokens = tokens.slice(0, tokens.length - 1);
|
||||
return strip(tokens);
|
||||
}
|
||||
case 'string':
|
||||
let tokenBeforeTheLastToken = tokens[tokens.length - 2];
|
||||
if (tokenBeforeTheLastToken?.type === 'delimiter') {
|
||||
tokens = tokens.slice(0, tokens.length - 1);
|
||||
return strip(tokens);
|
||||
}
|
||||
else if (tokenBeforeTheLastToken?.type === 'brace' && tokenBeforeTheLastToken.value === '{') {
|
||||
tokens = tokens.slice(0, tokens.length - 1);
|
||||
return strip(tokens);
|
||||
}
|
||||
break;
|
||||
case 'delimiter':
|
||||
tokens = tokens.slice(0, tokens.length - 1);
|
||||
return strip(tokens);
|
||||
break;
|
||||
}
|
||||
return tokens;
|
||||
}, unstrip = (tokens) => {
|
||||
let tail = [];
|
||||
tokens.map((token) => {
|
||||
if (token.type === 'brace') {
|
||||
if (token.value === '{') {
|
||||
tail.push('}');
|
||||
}
|
||||
else {
|
||||
tail.splice(tail.lastIndexOf('}'), 1);
|
||||
}
|
||||
}
|
||||
if (token.type === 'paren') {
|
||||
if (token.value === '[') {
|
||||
tail.push(']');
|
||||
}
|
||||
else {
|
||||
tail.splice(tail.lastIndexOf(']'), 1);
|
||||
}
|
||||
}
|
||||
});
|
||||
if (tail.length > 0) {
|
||||
tail.reverse().map((item) => {
|
||||
if (item === '}') {
|
||||
tokens.push({
|
||||
type: 'brace',
|
||||
value: '}',
|
||||
});
|
||||
}
|
||||
else if (item === ']') {
|
||||
tokens.push({
|
||||
type: 'paren',
|
||||
value: ']',
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
return tokens;
|
||||
}, generate = (tokens) => {
|
||||
let output = '';
|
||||
tokens.map((token) => {
|
||||
switch (token.type) {
|
||||
case 'string':
|
||||
output += '"' + token.value + '"';
|
||||
break;
|
||||
default:
|
||||
output += token.value;
|
||||
break;
|
||||
}
|
||||
});
|
||||
return output;
|
||||
}, partialParse = (input) => JSON.parse(generate(unstrip(strip(tokenize(input)))));
|
||||
export { partialParse };
|
||||
//# sourceMappingURL=parser.mjs.map
|
||||
559
extracted-source/node_modules/@anthropic-ai/sdk/client.mjs
generated
vendored
559
extracted-source/node_modules/@anthropic-ai/sdk/client.mjs
generated
vendored
@@ -1,559 +0,0 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
var _BaseAnthropic_instances, _a, _BaseAnthropic_encoder, _BaseAnthropic_baseURLOverridden;
|
||||
import { __classPrivateFieldGet, __classPrivateFieldSet } from "./internal/tslib.mjs";
|
||||
import { uuid4 } from "./internal/utils/uuid.mjs";
|
||||
import { validatePositiveInteger, isAbsoluteURL, safeJSON } from "./internal/utils/values.mjs";
|
||||
import { sleep } from "./internal/utils/sleep.mjs";
|
||||
import { castToError, isAbortError } from "./internal/errors.mjs";
|
||||
import { getPlatformHeaders } from "./internal/detect-platform.mjs";
|
||||
import * as Shims from "./internal/shims.mjs";
|
||||
import * as Opts from "./internal/request-options.mjs";
|
||||
import { VERSION } from "./version.mjs";
|
||||
import * as Errors from "./core/error.mjs";
|
||||
import * as Pagination from "./core/pagination.mjs";
|
||||
import * as Uploads from "./core/uploads.mjs";
|
||||
import * as API from "./resources/index.mjs";
|
||||
import { APIPromise } from "./core/api-promise.mjs";
|
||||
import { Completions, } from "./resources/completions.mjs";
|
||||
import { Models } from "./resources/models.mjs";
|
||||
import { Beta, } from "./resources/beta/beta.mjs";
|
||||
import { Messages, } from "./resources/messages/messages.mjs";
|
||||
import { isRunningInBrowser } from "./internal/detect-platform.mjs";
|
||||
import { buildHeaders } from "./internal/headers.mjs";
|
||||
import { readEnv } from "./internal/utils/env.mjs";
|
||||
import { formatRequestDetails, loggerFor, parseLogLevel, } from "./internal/utils/log.mjs";
|
||||
import { isEmptyObj } from "./internal/utils/values.mjs";
|
||||
export const HUMAN_PROMPT = '\\n\\nHuman:';
|
||||
export const AI_PROMPT = '\\n\\nAssistant:';
|
||||
/**
|
||||
* Base class for Anthropic API clients.
|
||||
*/
|
||||
export class BaseAnthropic {
|
||||
/**
|
||||
* API Client for interfacing with the Anthropic API.
|
||||
*
|
||||
* @param {string | null | undefined} [opts.apiKey=process.env['ANTHROPIC_API_KEY'] ?? null]
|
||||
* @param {string | null | undefined} [opts.authToken=process.env['ANTHROPIC_AUTH_TOKEN'] ?? null]
|
||||
* @param {string} [opts.baseURL=process.env['ANTHROPIC_BASE_URL'] ?? https://api.anthropic.com] - Override the default base URL for the API.
|
||||
* @param {number} [opts.timeout=10 minutes] - The maximum amount of time (in milliseconds) the client will wait for a response before timing out.
|
||||
* @param {MergedRequestInit} [opts.fetchOptions] - Additional `RequestInit` options to be passed to `fetch` calls.
|
||||
* @param {Fetch} [opts.fetch] - Specify a custom `fetch` function implementation.
|
||||
* @param {number} [opts.maxRetries=2] - The maximum number of times the client will retry a request.
|
||||
* @param {HeadersLike} opts.defaultHeaders - Default headers to include with every request to the API.
|
||||
* @param {Record<string, string | undefined>} opts.defaultQuery - Default query parameters to include with every request to the API.
|
||||
* @param {boolean} [opts.dangerouslyAllowBrowser=false] - By default, client-side use of this library is not allowed, as it risks exposing your secret API credentials to attackers.
|
||||
*/
|
||||
constructor({ baseURL = readEnv('ANTHROPIC_BASE_URL'), apiKey = readEnv('ANTHROPIC_API_KEY') ?? null, authToken = readEnv('ANTHROPIC_AUTH_TOKEN') ?? null, ...opts } = {}) {
|
||||
_BaseAnthropic_instances.add(this);
|
||||
_BaseAnthropic_encoder.set(this, void 0);
|
||||
const options = {
|
||||
apiKey,
|
||||
authToken,
|
||||
...opts,
|
||||
baseURL: baseURL || `https://api.anthropic.com`,
|
||||
};
|
||||
if (!options.dangerouslyAllowBrowser && isRunningInBrowser()) {
|
||||
throw new Errors.AnthropicError("It looks like you're running in a browser-like environment.\n\nThis is disabled by default, as it risks exposing your secret API credentials to attackers.\nIf you understand the risks and have appropriate mitigations in place,\nyou can set the `dangerouslyAllowBrowser` option to `true`, e.g.,\n\nnew Anthropic({ apiKey, dangerouslyAllowBrowser: true });\n");
|
||||
}
|
||||
this.baseURL = options.baseURL;
|
||||
this.timeout = options.timeout ?? _a.DEFAULT_TIMEOUT /* 10 minutes */;
|
||||
this.logger = options.logger ?? console;
|
||||
const defaultLogLevel = 'warn';
|
||||
// Set default logLevel early so that we can log a warning in parseLogLevel.
|
||||
this.logLevel = defaultLogLevel;
|
||||
this.logLevel =
|
||||
parseLogLevel(options.logLevel, 'ClientOptions.logLevel', this) ??
|
||||
parseLogLevel(readEnv('ANTHROPIC_LOG'), "process.env['ANTHROPIC_LOG']", this) ??
|
||||
defaultLogLevel;
|
||||
this.fetchOptions = options.fetchOptions;
|
||||
this.maxRetries = options.maxRetries ?? 2;
|
||||
this.fetch = options.fetch ?? Shims.getDefaultFetch();
|
||||
__classPrivateFieldSet(this, _BaseAnthropic_encoder, Opts.FallbackEncoder, "f");
|
||||
this._options = options;
|
||||
this.apiKey = typeof apiKey === 'string' ? apiKey : null;
|
||||
this.authToken = authToken;
|
||||
}
|
||||
/**
|
||||
* Create a new client instance re-using the same options given to the current client with optional overriding.
|
||||
*/
|
||||
withOptions(options) {
|
||||
const client = new this.constructor({
|
||||
...this._options,
|
||||
baseURL: this.baseURL,
|
||||
maxRetries: this.maxRetries,
|
||||
timeout: this.timeout,
|
||||
logger: this.logger,
|
||||
logLevel: this.logLevel,
|
||||
fetch: this.fetch,
|
||||
fetchOptions: this.fetchOptions,
|
||||
apiKey: this.apiKey,
|
||||
authToken: this.authToken,
|
||||
...options,
|
||||
});
|
||||
return client;
|
||||
}
|
||||
defaultQuery() {
|
||||
return this._options.defaultQuery;
|
||||
}
|
||||
validateHeaders({ values, nulls }) {
|
||||
if (values.get('x-api-key') || values.get('authorization')) {
|
||||
return;
|
||||
}
|
||||
if (this.apiKey && values.get('x-api-key')) {
|
||||
return;
|
||||
}
|
||||
if (nulls.has('x-api-key')) {
|
||||
return;
|
||||
}
|
||||
if (this.authToken && values.get('authorization')) {
|
||||
return;
|
||||
}
|
||||
if (nulls.has('authorization')) {
|
||||
return;
|
||||
}
|
||||
throw new Error('Could not resolve authentication method. Expected either apiKey or authToken to be set. Or for one of the "X-Api-Key" or "Authorization" headers to be explicitly omitted');
|
||||
}
|
||||
async authHeaders(opts) {
|
||||
return buildHeaders([await this.apiKeyAuth(opts), await this.bearerAuth(opts)]);
|
||||
}
|
||||
async apiKeyAuth(opts) {
|
||||
if (this.apiKey == null) {
|
||||
return undefined;
|
||||
}
|
||||
return buildHeaders([{ 'X-Api-Key': this.apiKey }]);
|
||||
}
|
||||
async bearerAuth(opts) {
|
||||
if (this.authToken == null) {
|
||||
return undefined;
|
||||
}
|
||||
return buildHeaders([{ Authorization: `Bearer ${this.authToken}` }]);
|
||||
}
|
||||
/**
|
||||
* Basic re-implementation of `qs.stringify` for primitive types.
|
||||
*/
|
||||
stringifyQuery(query) {
|
||||
return Object.entries(query)
|
||||
.filter(([_, value]) => typeof value !== 'undefined')
|
||||
.map(([key, value]) => {
|
||||
if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') {
|
||||
return `${encodeURIComponent(key)}=${encodeURIComponent(value)}`;
|
||||
}
|
||||
if (value === null) {
|
||||
return `${encodeURIComponent(key)}=`;
|
||||
}
|
||||
throw new Errors.AnthropicError(`Cannot stringify type ${typeof value}; Expected string, number, boolean, or null. If you need to pass nested query parameters, you can manually encode them, e.g. { query: { 'foo[key1]': value1, 'foo[key2]': value2 } }, and please open a GitHub issue requesting better support for your use case.`);
|
||||
})
|
||||
.join('&');
|
||||
}
|
||||
getUserAgent() {
|
||||
return `${this.constructor.name}/JS ${VERSION}`;
|
||||
}
|
||||
defaultIdempotencyKey() {
|
||||
return `stainless-node-retry-${uuid4()}`;
|
||||
}
|
||||
makeStatusError(status, error, message, headers) {
|
||||
return Errors.APIError.generate(status, error, message, headers);
|
||||
}
|
||||
buildURL(path, query, defaultBaseURL) {
|
||||
const baseURL = (!__classPrivateFieldGet(this, _BaseAnthropic_instances, "m", _BaseAnthropic_baseURLOverridden).call(this) && defaultBaseURL) || this.baseURL;
|
||||
const url = isAbsoluteURL(path) ?
|
||||
new URL(path)
|
||||
: new URL(baseURL + (baseURL.endsWith('/') && path.startsWith('/') ? path.slice(1) : path));
|
||||
const defaultQuery = this.defaultQuery();
|
||||
if (!isEmptyObj(defaultQuery)) {
|
||||
query = { ...defaultQuery, ...query };
|
||||
}
|
||||
if (typeof query === 'object' && query && !Array.isArray(query)) {
|
||||
url.search = this.stringifyQuery(query);
|
||||
}
|
||||
return url.toString();
|
||||
}
|
||||
_calculateNonstreamingTimeout(maxTokens) {
|
||||
const defaultTimeout = 10 * 60;
|
||||
const expectedTimeout = (60 * 60 * maxTokens) / 128000;
|
||||
if (expectedTimeout > defaultTimeout) {
|
||||
throw new Errors.AnthropicError('Streaming is required for operations that may take longer than 10 minutes. ' +
|
||||
'See https://github.com/anthropics/anthropic-sdk-typescript#streaming-responses for more details');
|
||||
}
|
||||
return defaultTimeout * 1000;
|
||||
}
|
||||
/**
|
||||
* Used as a callback for mutating the given `FinalRequestOptions` object.
|
||||
*/
|
||||
async prepareOptions(options) { }
|
||||
/**
|
||||
* Used as a callback for mutating the given `RequestInit` object.
|
||||
*
|
||||
* This is useful for cases where you want to add certain headers based off of
|
||||
* the request properties, e.g. `method` or `url`.
|
||||
*/
|
||||
async prepareRequest(request, { url, options }) { }
|
||||
get(path, opts) {
|
||||
return this.methodRequest('get', path, opts);
|
||||
}
|
||||
post(path, opts) {
|
||||
return this.methodRequest('post', path, opts);
|
||||
}
|
||||
patch(path, opts) {
|
||||
return this.methodRequest('patch', path, opts);
|
||||
}
|
||||
put(path, opts) {
|
||||
return this.methodRequest('put', path, opts);
|
||||
}
|
||||
delete(path, opts) {
|
||||
return this.methodRequest('delete', path, opts);
|
||||
}
|
||||
methodRequest(method, path, opts) {
|
||||
return this.request(Promise.resolve(opts).then((opts) => {
|
||||
return { method, path, ...opts };
|
||||
}));
|
||||
}
|
||||
request(options, remainingRetries = null) {
|
||||
return new APIPromise(this, this.makeRequest(options, remainingRetries, undefined));
|
||||
}
|
||||
async makeRequest(optionsInput, retriesRemaining, retryOfRequestLogID) {
|
||||
const options = await optionsInput;
|
||||
const maxRetries = options.maxRetries ?? this.maxRetries;
|
||||
if (retriesRemaining == null) {
|
||||
retriesRemaining = maxRetries;
|
||||
}
|
||||
await this.prepareOptions(options);
|
||||
const { req, url, timeout } = await this.buildRequest(options, {
|
||||
retryCount: maxRetries - retriesRemaining,
|
||||
});
|
||||
await this.prepareRequest(req, { url, options });
|
||||
/** Not an API request ID, just for correlating local log entries. */
|
||||
const requestLogID = 'log_' + ((Math.random() * (1 << 24)) | 0).toString(16).padStart(6, '0');
|
||||
const retryLogStr = retryOfRequestLogID === undefined ? '' : `, retryOf: ${retryOfRequestLogID}`;
|
||||
const startTime = Date.now();
|
||||
loggerFor(this).debug(`[${requestLogID}] sending request`, formatRequestDetails({
|
||||
retryOfRequestLogID,
|
||||
method: options.method,
|
||||
url,
|
||||
options,
|
||||
headers: req.headers,
|
||||
}));
|
||||
if (options.signal?.aborted) {
|
||||
throw new Errors.APIUserAbortError();
|
||||
}
|
||||
const controller = new AbortController();
|
||||
const response = await this.fetchWithTimeout(url, req, timeout, controller).catch(castToError);
|
||||
const headersTime = Date.now();
|
||||
if (response instanceof globalThis.Error) {
|
||||
const retryMessage = `retrying, ${retriesRemaining} attempts remaining`;
|
||||
if (options.signal?.aborted) {
|
||||
throw new Errors.APIUserAbortError();
|
||||
}
|
||||
// detect native connection timeout errors
|
||||
// deno throws "TypeError: error sending request for url (https://example/): client error (Connect): tcp connect error: Operation timed out (os error 60): Operation timed out (os error 60)"
|
||||
// undici throws "TypeError: fetch failed" with cause "ConnectTimeoutError: Connect Timeout Error (attempted address: example:443, timeout: 1ms)"
|
||||
// others do not provide enough information to distinguish timeouts from other connection errors
|
||||
const isTimeout = isAbortError(response) ||
|
||||
/timed? ?out/i.test(String(response) + ('cause' in response ? String(response.cause) : ''));
|
||||
if (retriesRemaining) {
|
||||
loggerFor(this).info(`[${requestLogID}] connection ${isTimeout ? 'timed out' : 'failed'} - ${retryMessage}`);
|
||||
loggerFor(this).debug(`[${requestLogID}] connection ${isTimeout ? 'timed out' : 'failed'} (${retryMessage})`, formatRequestDetails({
|
||||
retryOfRequestLogID,
|
||||
url,
|
||||
durationMs: headersTime - startTime,
|
||||
message: response.message,
|
||||
}));
|
||||
return this.retryRequest(options, retriesRemaining, retryOfRequestLogID ?? requestLogID);
|
||||
}
|
||||
loggerFor(this).info(`[${requestLogID}] connection ${isTimeout ? 'timed out' : 'failed'} - error; no more retries left`);
|
||||
loggerFor(this).debug(`[${requestLogID}] connection ${isTimeout ? 'timed out' : 'failed'} (error; no more retries left)`, formatRequestDetails({
|
||||
retryOfRequestLogID,
|
||||
url,
|
||||
durationMs: headersTime - startTime,
|
||||
message: response.message,
|
||||
}));
|
||||
if (isTimeout) {
|
||||
throw new Errors.APIConnectionTimeoutError();
|
||||
}
|
||||
throw new Errors.APIConnectionError({ cause: response });
|
||||
}
|
||||
const specialHeaders = [...response.headers.entries()]
|
||||
.filter(([name]) => name === 'request-id')
|
||||
.map(([name, value]) => ', ' + name + ': ' + JSON.stringify(value))
|
||||
.join('');
|
||||
const responseInfo = `[${requestLogID}${retryLogStr}${specialHeaders}] ${req.method} ${url} ${response.ok ? 'succeeded' : 'failed'} with status ${response.status} in ${headersTime - startTime}ms`;
|
||||
if (!response.ok) {
|
||||
const shouldRetry = await this.shouldRetry(response);
|
||||
if (retriesRemaining && shouldRetry) {
|
||||
const retryMessage = `retrying, ${retriesRemaining} attempts remaining`;
|
||||
// We don't need the body of this response.
|
||||
await Shims.CancelReadableStream(response.body);
|
||||
loggerFor(this).info(`${responseInfo} - ${retryMessage}`);
|
||||
loggerFor(this).debug(`[${requestLogID}] response error (${retryMessage})`, formatRequestDetails({
|
||||
retryOfRequestLogID,
|
||||
url: response.url,
|
||||
status: response.status,
|
||||
headers: response.headers,
|
||||
durationMs: headersTime - startTime,
|
||||
}));
|
||||
return this.retryRequest(options, retriesRemaining, retryOfRequestLogID ?? requestLogID, response.headers);
|
||||
}
|
||||
const retryMessage = shouldRetry ? `error; no more retries left` : `error; not retryable`;
|
||||
loggerFor(this).info(`${responseInfo} - ${retryMessage}`);
|
||||
const errText = await response.text().catch((err) => castToError(err).message);
|
||||
const errJSON = safeJSON(errText);
|
||||
const errMessage = errJSON ? undefined : errText;
|
||||
loggerFor(this).debug(`[${requestLogID}] response error (${retryMessage})`, formatRequestDetails({
|
||||
retryOfRequestLogID,
|
||||
url: response.url,
|
||||
status: response.status,
|
||||
headers: response.headers,
|
||||
message: errMessage,
|
||||
durationMs: Date.now() - startTime,
|
||||
}));
|
||||
const err = this.makeStatusError(response.status, errJSON, errMessage, response.headers);
|
||||
throw err;
|
||||
}
|
||||
loggerFor(this).info(responseInfo);
|
||||
loggerFor(this).debug(`[${requestLogID}] response start`, formatRequestDetails({
|
||||
retryOfRequestLogID,
|
||||
url: response.url,
|
||||
status: response.status,
|
||||
headers: response.headers,
|
||||
durationMs: headersTime - startTime,
|
||||
}));
|
||||
return { response, options, controller, requestLogID, retryOfRequestLogID, startTime };
|
||||
}
|
||||
getAPIList(path, Page, opts) {
|
||||
return this.requestAPIList(Page, opts && 'then' in opts ?
|
||||
opts.then((opts) => ({ method: 'get', path, ...opts }))
|
||||
: { method: 'get', path, ...opts });
|
||||
}
|
||||
requestAPIList(Page, options) {
|
||||
const request = this.makeRequest(options, null, undefined);
|
||||
return new Pagination.PagePromise(this, request, Page);
|
||||
}
|
||||
async fetchWithTimeout(url, init, ms, controller) {
|
||||
const { signal, method, ...options } = init || {};
|
||||
// Avoid creating a closure over `this`, `init`, or `options` to prevent memory leaks.
|
||||
// An arrow function like `() => controller.abort()` captures the surrounding scope,
|
||||
// which includes the request body and other large objects. When the user passes a
|
||||
// long-lived AbortSignal, the listener prevents those objects from being GC'd for
|
||||
// the lifetime of the signal. Using `.bind()` only retains a reference to the
|
||||
// controller itself.
|
||||
const abort = this._makeAbort(controller);
|
||||
if (signal)
|
||||
signal.addEventListener('abort', abort, { once: true });
|
||||
const timeout = setTimeout(abort, ms);
|
||||
const isReadableBody = (globalThis.ReadableStream && options.body instanceof globalThis.ReadableStream) ||
|
||||
(typeof options.body === 'object' && options.body !== null && Symbol.asyncIterator in options.body);
|
||||
const fetchOptions = {
|
||||
signal: controller.signal,
|
||||
...(isReadableBody ? { duplex: 'half' } : {}),
|
||||
method: 'GET',
|
||||
...options,
|
||||
};
|
||||
if (method) {
|
||||
// Custom methods like 'patch' need to be uppercased
|
||||
// See https://github.com/nodejs/undici/issues/2294
|
||||
fetchOptions.method = method.toUpperCase();
|
||||
}
|
||||
try {
|
||||
// use undefined this binding; fetch errors if bound to something else in browser/cloudflare
|
||||
return await this.fetch.call(undefined, url, fetchOptions);
|
||||
}
|
||||
finally {
|
||||
clearTimeout(timeout);
|
||||
}
|
||||
}
|
||||
async shouldRetry(response) {
|
||||
// Note this is not a standard header.
|
||||
const shouldRetryHeader = response.headers.get('x-should-retry');
|
||||
// If the server explicitly says whether or not to retry, obey.
|
||||
if (shouldRetryHeader === 'true')
|
||||
return true;
|
||||
if (shouldRetryHeader === 'false')
|
||||
return false;
|
||||
// Retry on request timeouts.
|
||||
if (response.status === 408)
|
||||
return true;
|
||||
// Retry on lock timeouts.
|
||||
if (response.status === 409)
|
||||
return true;
|
||||
// Retry on rate limits.
|
||||
if (response.status === 429)
|
||||
return true;
|
||||
// Retry internal errors.
|
||||
if (response.status >= 500)
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
async retryRequest(options, retriesRemaining, requestLogID, responseHeaders) {
|
||||
let timeoutMillis;
|
||||
// Note the `retry-after-ms` header may not be standard, but is a good idea and we'd like proactive support for it.
|
||||
const retryAfterMillisHeader = responseHeaders?.get('retry-after-ms');
|
||||
if (retryAfterMillisHeader) {
|
||||
const timeoutMs = parseFloat(retryAfterMillisHeader);
|
||||
if (!Number.isNaN(timeoutMs)) {
|
||||
timeoutMillis = timeoutMs;
|
||||
}
|
||||
}
|
||||
// About the Retry-After header: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After
|
||||
const retryAfterHeader = responseHeaders?.get('retry-after');
|
||||
if (retryAfterHeader && !timeoutMillis) {
|
||||
const timeoutSeconds = parseFloat(retryAfterHeader);
|
||||
if (!Number.isNaN(timeoutSeconds)) {
|
||||
timeoutMillis = timeoutSeconds * 1000;
|
||||
}
|
||||
else {
|
||||
timeoutMillis = Date.parse(retryAfterHeader) - Date.now();
|
||||
}
|
||||
}
|
||||
// If the API asks us to wait a certain amount of time (and it's a reasonable amount),
|
||||
// just do what it says, but otherwise calculate a default
|
||||
if (!(timeoutMillis && 0 <= timeoutMillis && timeoutMillis < 60 * 1000)) {
|
||||
const maxRetries = options.maxRetries ?? this.maxRetries;
|
||||
timeoutMillis = this.calculateDefaultRetryTimeoutMillis(retriesRemaining, maxRetries);
|
||||
}
|
||||
await sleep(timeoutMillis);
|
||||
return this.makeRequest(options, retriesRemaining - 1, requestLogID);
|
||||
}
|
||||
calculateDefaultRetryTimeoutMillis(retriesRemaining, maxRetries) {
|
||||
const initialRetryDelay = 0.5;
|
||||
const maxRetryDelay = 8.0;
|
||||
const numRetries = maxRetries - retriesRemaining;
|
||||
// Apply exponential backoff, but not more than the max.
|
||||
const sleepSeconds = Math.min(initialRetryDelay * Math.pow(2, numRetries), maxRetryDelay);
|
||||
// Apply some jitter, take up to at most 25 percent of the retry time.
|
||||
const jitter = 1 - Math.random() * 0.25;
|
||||
return sleepSeconds * jitter * 1000;
|
||||
}
|
||||
calculateNonstreamingTimeout(maxTokens, maxNonstreamingTokens) {
|
||||
const maxTime = 60 * 60 * 1000; // 60 minutes
|
||||
const defaultTime = 60 * 10 * 1000; // 10 minutes
|
||||
const expectedTime = (maxTime * maxTokens) / 128000;
|
||||
if (expectedTime > defaultTime || (maxNonstreamingTokens != null && maxTokens > maxNonstreamingTokens)) {
|
||||
throw new Errors.AnthropicError('Streaming is required for operations that may take longer than 10 minutes. See https://github.com/anthropics/anthropic-sdk-typescript#long-requests for more details');
|
||||
}
|
||||
return defaultTime;
|
||||
}
|
||||
async buildRequest(inputOptions, { retryCount = 0 } = {}) {
|
||||
const options = { ...inputOptions };
|
||||
const { method, path, query, defaultBaseURL } = options;
|
||||
const url = this.buildURL(path, query, defaultBaseURL);
|
||||
if ('timeout' in options)
|
||||
validatePositiveInteger('timeout', options.timeout);
|
||||
options.timeout = options.timeout ?? this.timeout;
|
||||
const { bodyHeaders, body } = this.buildBody({ options });
|
||||
const reqHeaders = await this.buildHeaders({ options: inputOptions, method, bodyHeaders, retryCount });
|
||||
const req = {
|
||||
method,
|
||||
headers: reqHeaders,
|
||||
...(options.signal && { signal: options.signal }),
|
||||
...(globalThis.ReadableStream &&
|
||||
body instanceof globalThis.ReadableStream && { duplex: 'half' }),
|
||||
...(body && { body }),
|
||||
...(this.fetchOptions ?? {}),
|
||||
...(options.fetchOptions ?? {}),
|
||||
};
|
||||
return { req, url, timeout: options.timeout };
|
||||
}
|
||||
async buildHeaders({ options, method, bodyHeaders, retryCount, }) {
|
||||
let idempotencyHeaders = {};
|
||||
if (this.idempotencyHeader && method !== 'get') {
|
||||
if (!options.idempotencyKey)
|
||||
options.idempotencyKey = this.defaultIdempotencyKey();
|
||||
idempotencyHeaders[this.idempotencyHeader] = options.idempotencyKey;
|
||||
}
|
||||
const headers = buildHeaders([
|
||||
idempotencyHeaders,
|
||||
{
|
||||
Accept: 'application/json',
|
||||
'User-Agent': this.getUserAgent(),
|
||||
'X-Stainless-Retry-Count': String(retryCount),
|
||||
...(options.timeout ? { 'X-Stainless-Timeout': String(Math.trunc(options.timeout / 1000)) } : {}),
|
||||
...getPlatformHeaders(),
|
||||
...(this._options.dangerouslyAllowBrowser ?
|
||||
{ 'anthropic-dangerous-direct-browser-access': 'true' }
|
||||
: undefined),
|
||||
'anthropic-version': '2023-06-01',
|
||||
},
|
||||
await this.authHeaders(options),
|
||||
this._options.defaultHeaders,
|
||||
bodyHeaders,
|
||||
options.headers,
|
||||
]);
|
||||
this.validateHeaders(headers);
|
||||
return headers.values;
|
||||
}
|
||||
_makeAbort(controller) {
|
||||
// note: we can't just inline this method inside `fetchWithTimeout()` because then the closure
|
||||
// would capture all request options, and cause a memory leak.
|
||||
return () => controller.abort();
|
||||
}
|
||||
buildBody({ options: { body, headers: rawHeaders } }) {
|
||||
if (!body) {
|
||||
return { bodyHeaders: undefined, body: undefined };
|
||||
}
|
||||
const headers = buildHeaders([rawHeaders]);
|
||||
if (
|
||||
// Pass raw type verbatim
|
||||
ArrayBuffer.isView(body) ||
|
||||
body instanceof ArrayBuffer ||
|
||||
body instanceof DataView ||
|
||||
(typeof body === 'string' &&
|
||||
// Preserve legacy string encoding behavior for now
|
||||
headers.values.has('content-type')) ||
|
||||
// `Blob` is superset of `File`
|
||||
(globalThis.Blob && body instanceof globalThis.Blob) ||
|
||||
// `FormData` -> `multipart/form-data`
|
||||
body instanceof FormData ||
|
||||
// `URLSearchParams` -> `application/x-www-form-urlencoded`
|
||||
body instanceof URLSearchParams ||
|
||||
// Send chunked stream (each chunk has own `length`)
|
||||
(globalThis.ReadableStream && body instanceof globalThis.ReadableStream)) {
|
||||
return { bodyHeaders: undefined, body: body };
|
||||
}
|
||||
else if (typeof body === 'object' &&
|
||||
(Symbol.asyncIterator in body ||
|
||||
(Symbol.iterator in body && 'next' in body && typeof body.next === 'function'))) {
|
||||
return { bodyHeaders: undefined, body: Shims.ReadableStreamFrom(body) };
|
||||
}
|
||||
else {
|
||||
return __classPrivateFieldGet(this, _BaseAnthropic_encoder, "f").call(this, { body, headers });
|
||||
}
|
||||
}
|
||||
}
|
||||
_a = BaseAnthropic, _BaseAnthropic_encoder = new WeakMap(), _BaseAnthropic_instances = new WeakSet(), _BaseAnthropic_baseURLOverridden = function _BaseAnthropic_baseURLOverridden() {
|
||||
return this.baseURL !== 'https://api.anthropic.com';
|
||||
};
|
||||
BaseAnthropic.Anthropic = _a;
|
||||
BaseAnthropic.HUMAN_PROMPT = HUMAN_PROMPT;
|
||||
BaseAnthropic.AI_PROMPT = AI_PROMPT;
|
||||
BaseAnthropic.DEFAULT_TIMEOUT = 600000; // 10 minutes
|
||||
BaseAnthropic.AnthropicError = Errors.AnthropicError;
|
||||
BaseAnthropic.APIError = Errors.APIError;
|
||||
BaseAnthropic.APIConnectionError = Errors.APIConnectionError;
|
||||
BaseAnthropic.APIConnectionTimeoutError = Errors.APIConnectionTimeoutError;
|
||||
BaseAnthropic.APIUserAbortError = Errors.APIUserAbortError;
|
||||
BaseAnthropic.NotFoundError = Errors.NotFoundError;
|
||||
BaseAnthropic.ConflictError = Errors.ConflictError;
|
||||
BaseAnthropic.RateLimitError = Errors.RateLimitError;
|
||||
BaseAnthropic.BadRequestError = Errors.BadRequestError;
|
||||
BaseAnthropic.AuthenticationError = Errors.AuthenticationError;
|
||||
BaseAnthropic.InternalServerError = Errors.InternalServerError;
|
||||
BaseAnthropic.PermissionDeniedError = Errors.PermissionDeniedError;
|
||||
BaseAnthropic.UnprocessableEntityError = Errors.UnprocessableEntityError;
|
||||
BaseAnthropic.toFile = Uploads.toFile;
|
||||
/**
|
||||
* API Client for interfacing with the Anthropic API.
|
||||
*/
|
||||
export class Anthropic extends BaseAnthropic {
|
||||
constructor() {
|
||||
super(...arguments);
|
||||
this.completions = new API.Completions(this);
|
||||
this.messages = new API.Messages(this);
|
||||
this.models = new API.Models(this);
|
||||
this.beta = new API.Beta(this);
|
||||
}
|
||||
}
|
||||
Anthropic.Completions = Completions;
|
||||
Anthropic.Messages = Messages;
|
||||
Anthropic.Models = Models;
|
||||
Anthropic.Beta = Beta;
|
||||
//# sourceMappingURL=client.mjs.map
|
||||
72
extracted-source/node_modules/@anthropic-ai/sdk/core/api-promise.mjs
generated
vendored
72
extracted-source/node_modules/@anthropic-ai/sdk/core/api-promise.mjs
generated
vendored
@@ -1,72 +0,0 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
var _APIPromise_client;
|
||||
import { __classPrivateFieldGet, __classPrivateFieldSet } from "../internal/tslib.mjs";
|
||||
import { defaultParseResponse, addRequestID, } from "../internal/parse.mjs";
|
||||
/**
|
||||
* A subclass of `Promise` providing additional helper methods
|
||||
* for interacting with the SDK.
|
||||
*/
|
||||
export class APIPromise extends Promise {
|
||||
constructor(client, responsePromise, parseResponse = defaultParseResponse) {
|
||||
super((resolve) => {
|
||||
// this is maybe a bit weird but this has to be a no-op to not implicitly
|
||||
// parse the response body; instead .then, .catch, .finally are overridden
|
||||
// to parse the response
|
||||
resolve(null);
|
||||
});
|
||||
this.responsePromise = responsePromise;
|
||||
this.parseResponse = parseResponse;
|
||||
_APIPromise_client.set(this, void 0);
|
||||
__classPrivateFieldSet(this, _APIPromise_client, client, "f");
|
||||
}
|
||||
_thenUnwrap(transform) {
|
||||
return new APIPromise(__classPrivateFieldGet(this, _APIPromise_client, "f"), this.responsePromise, async (client, props) => addRequestID(transform(await this.parseResponse(client, props), props), props.response));
|
||||
}
|
||||
/**
|
||||
* Gets the raw `Response` instance instead of parsing the response
|
||||
* data.
|
||||
*
|
||||
* If you want to parse the response body but still get the `Response`
|
||||
* instance, you can use {@link withResponse()}.
|
||||
*
|
||||
* 👋 Getting the wrong TypeScript type for `Response`?
|
||||
* Try setting `"moduleResolution": "NodeNext"` or add `"lib": ["DOM"]`
|
||||
* to your `tsconfig.json`.
|
||||
*/
|
||||
asResponse() {
|
||||
return this.responsePromise.then((p) => p.response);
|
||||
}
|
||||
/**
|
||||
* Gets the parsed response data, the raw `Response` instance and the ID of the request,
|
||||
* returned via the `request-id` header which is useful for debugging requests and resporting
|
||||
* issues to Anthropic.
|
||||
*
|
||||
* If you just want to get the raw `Response` instance without parsing it,
|
||||
* you can use {@link asResponse()}.
|
||||
*
|
||||
* 👋 Getting the wrong TypeScript type for `Response`?
|
||||
* Try setting `"moduleResolution": "NodeNext"` or add `"lib": ["DOM"]`
|
||||
* to your `tsconfig.json`.
|
||||
*/
|
||||
async withResponse() {
|
||||
const [data, response] = await Promise.all([this.parse(), this.asResponse()]);
|
||||
return { data, response, request_id: response.headers.get('request-id') };
|
||||
}
|
||||
parse() {
|
||||
if (!this.parsedPromise) {
|
||||
this.parsedPromise = this.responsePromise.then((data) => this.parseResponse(__classPrivateFieldGet(this, _APIPromise_client, "f"), data));
|
||||
}
|
||||
return this.parsedPromise;
|
||||
}
|
||||
then(onfulfilled, onrejected) {
|
||||
return this.parse().then(onfulfilled, onrejected);
|
||||
}
|
||||
catch(onrejected) {
|
||||
return this.parse().catch(onrejected);
|
||||
}
|
||||
finally(onfinally) {
|
||||
return this.parse().finally(onfinally);
|
||||
}
|
||||
}
|
||||
_APIPromise_client = new WeakMap();
|
||||
//# sourceMappingURL=api-promise.mjs.map
|
||||
98
extracted-source/node_modules/@anthropic-ai/sdk/core/error.mjs
generated
vendored
98
extracted-source/node_modules/@anthropic-ai/sdk/core/error.mjs
generated
vendored
@@ -1,98 +0,0 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { castToError } from "../internal/errors.mjs";
|
||||
export class AnthropicError extends Error {
|
||||
}
|
||||
export class APIError extends AnthropicError {
|
||||
constructor(status, error, message, headers) {
|
||||
super(`${APIError.makeMessage(status, error, message)}`);
|
||||
this.status = status;
|
||||
this.headers = headers;
|
||||
this.requestID = headers?.get('request-id');
|
||||
this.error = error;
|
||||
}
|
||||
static makeMessage(status, error, message) {
|
||||
const msg = error?.message ?
|
||||
typeof error.message === 'string' ?
|
||||
error.message
|
||||
: JSON.stringify(error.message)
|
||||
: error ? JSON.stringify(error)
|
||||
: message;
|
||||
if (status && msg) {
|
||||
return `${status} ${msg}`;
|
||||
}
|
||||
if (status) {
|
||||
return `${status} status code (no body)`;
|
||||
}
|
||||
if (msg) {
|
||||
return msg;
|
||||
}
|
||||
return '(no status code or body)';
|
||||
}
|
||||
static generate(status, errorResponse, message, headers) {
|
||||
if (!status || !headers) {
|
||||
return new APIConnectionError({ message, cause: castToError(errorResponse) });
|
||||
}
|
||||
const error = errorResponse;
|
||||
if (status === 400) {
|
||||
return new BadRequestError(status, error, message, headers);
|
||||
}
|
||||
if (status === 401) {
|
||||
return new AuthenticationError(status, error, message, headers);
|
||||
}
|
||||
if (status === 403) {
|
||||
return new PermissionDeniedError(status, error, message, headers);
|
||||
}
|
||||
if (status === 404) {
|
||||
return new NotFoundError(status, error, message, headers);
|
||||
}
|
||||
if (status === 409) {
|
||||
return new ConflictError(status, error, message, headers);
|
||||
}
|
||||
if (status === 422) {
|
||||
return new UnprocessableEntityError(status, error, message, headers);
|
||||
}
|
||||
if (status === 429) {
|
||||
return new RateLimitError(status, error, message, headers);
|
||||
}
|
||||
if (status >= 500) {
|
||||
return new InternalServerError(status, error, message, headers);
|
||||
}
|
||||
return new APIError(status, error, message, headers);
|
||||
}
|
||||
}
|
||||
export class APIUserAbortError extends APIError {
|
||||
constructor({ message } = {}) {
|
||||
super(undefined, undefined, message || 'Request was aborted.', undefined);
|
||||
}
|
||||
}
|
||||
export class APIConnectionError extends APIError {
|
||||
constructor({ message, cause }) {
|
||||
super(undefined, undefined, message || 'Connection error.', undefined);
|
||||
// in some environments the 'cause' property is already declared
|
||||
// @ts-ignore
|
||||
if (cause)
|
||||
this.cause = cause;
|
||||
}
|
||||
}
|
||||
export class APIConnectionTimeoutError extends APIConnectionError {
|
||||
constructor({ message } = {}) {
|
||||
super({ message: message ?? 'Request timed out.' });
|
||||
}
|
||||
}
|
||||
export class BadRequestError extends APIError {
|
||||
}
|
||||
export class AuthenticationError extends APIError {
|
||||
}
|
||||
export class PermissionDeniedError extends APIError {
|
||||
}
|
||||
export class NotFoundError extends APIError {
|
||||
}
|
||||
export class ConflictError extends APIError {
|
||||
}
|
||||
export class UnprocessableEntityError extends APIError {
|
||||
}
|
||||
export class RateLimitError extends APIError {
|
||||
}
|
||||
export class InternalServerError extends APIError {
|
||||
}
|
||||
//# sourceMappingURL=error.mjs.map
|
||||
177
extracted-source/node_modules/@anthropic-ai/sdk/core/pagination.mjs
generated
vendored
177
extracted-source/node_modules/@anthropic-ai/sdk/core/pagination.mjs
generated
vendored
@@ -1,177 +0,0 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
var _AbstractPage_client;
|
||||
import { __classPrivateFieldGet, __classPrivateFieldSet } from "../internal/tslib.mjs";
|
||||
import { AnthropicError } from "./error.mjs";
|
||||
import { defaultParseResponse } from "../internal/parse.mjs";
|
||||
import { APIPromise } from "./api-promise.mjs";
|
||||
import { maybeObj } from "../internal/utils/values.mjs";
|
||||
export class AbstractPage {
|
||||
constructor(client, response, body, options) {
|
||||
_AbstractPage_client.set(this, void 0);
|
||||
__classPrivateFieldSet(this, _AbstractPage_client, client, "f");
|
||||
this.options = options;
|
||||
this.response = response;
|
||||
this.body = body;
|
||||
}
|
||||
hasNextPage() {
|
||||
const items = this.getPaginatedItems();
|
||||
if (!items.length)
|
||||
return false;
|
||||
return this.nextPageRequestOptions() != null;
|
||||
}
|
||||
async getNextPage() {
|
||||
const nextOptions = this.nextPageRequestOptions();
|
||||
if (!nextOptions) {
|
||||
throw new AnthropicError('No next page expected; please check `.hasNextPage()` before calling `.getNextPage()`.');
|
||||
}
|
||||
return await __classPrivateFieldGet(this, _AbstractPage_client, "f").requestAPIList(this.constructor, nextOptions);
|
||||
}
|
||||
async *iterPages() {
|
||||
let page = this;
|
||||
yield page;
|
||||
while (page.hasNextPage()) {
|
||||
page = await page.getNextPage();
|
||||
yield page;
|
||||
}
|
||||
}
|
||||
async *[(_AbstractPage_client = new WeakMap(), Symbol.asyncIterator)]() {
|
||||
for await (const page of this.iterPages()) {
|
||||
for (const item of page.getPaginatedItems()) {
|
||||
yield item;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* This subclass of Promise will resolve to an instantiated Page once the request completes.
|
||||
*
|
||||
* It also implements AsyncIterable to allow auto-paginating iteration on an unawaited list call, eg:
|
||||
*
|
||||
* for await (const item of client.items.list()) {
|
||||
* console.log(item)
|
||||
* }
|
||||
*/
|
||||
export class PagePromise extends APIPromise {
|
||||
constructor(client, request, Page) {
|
||||
super(client, request, async (client, props) => new Page(client, props.response, await defaultParseResponse(client, props), props.options));
|
||||
}
|
||||
/**
|
||||
* Allow auto-paginating iteration on an unawaited list call, eg:
|
||||
*
|
||||
* for await (const item of client.items.list()) {
|
||||
* console.log(item)
|
||||
* }
|
||||
*/
|
||||
async *[Symbol.asyncIterator]() {
|
||||
const page = await this;
|
||||
for await (const item of page) {
|
||||
yield item;
|
||||
}
|
||||
}
|
||||
}
|
||||
export class Page extends AbstractPage {
|
||||
constructor(client, response, body, options) {
|
||||
super(client, response, body, options);
|
||||
this.data = body.data || [];
|
||||
this.has_more = body.has_more || false;
|
||||
this.first_id = body.first_id || null;
|
||||
this.last_id = body.last_id || null;
|
||||
}
|
||||
getPaginatedItems() {
|
||||
return this.data ?? [];
|
||||
}
|
||||
hasNextPage() {
|
||||
if (this.has_more === false) {
|
||||
return false;
|
||||
}
|
||||
return super.hasNextPage();
|
||||
}
|
||||
nextPageRequestOptions() {
|
||||
if (this.options.query?.['before_id']) {
|
||||
// in reverse
|
||||
const first_id = this.first_id;
|
||||
if (!first_id) {
|
||||
return null;
|
||||
}
|
||||
return {
|
||||
...this.options,
|
||||
query: {
|
||||
...maybeObj(this.options.query),
|
||||
before_id: first_id,
|
||||
},
|
||||
};
|
||||
}
|
||||
const cursor = this.last_id;
|
||||
if (!cursor) {
|
||||
return null;
|
||||
}
|
||||
return {
|
||||
...this.options,
|
||||
query: {
|
||||
...maybeObj(this.options.query),
|
||||
after_id: cursor,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
export class TokenPage extends AbstractPage {
|
||||
constructor(client, response, body, options) {
|
||||
super(client, response, body, options);
|
||||
this.data = body.data || [];
|
||||
this.has_more = body.has_more || false;
|
||||
this.next_page = body.next_page || null;
|
||||
}
|
||||
getPaginatedItems() {
|
||||
return this.data ?? [];
|
||||
}
|
||||
hasNextPage() {
|
||||
if (this.has_more === false) {
|
||||
return false;
|
||||
}
|
||||
return super.hasNextPage();
|
||||
}
|
||||
nextPageRequestOptions() {
|
||||
const cursor = this.next_page;
|
||||
if (!cursor) {
|
||||
return null;
|
||||
}
|
||||
return {
|
||||
...this.options,
|
||||
query: {
|
||||
...maybeObj(this.options.query),
|
||||
page_token: cursor,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
export class PageCursor extends AbstractPage {
|
||||
constructor(client, response, body, options) {
|
||||
super(client, response, body, options);
|
||||
this.data = body.data || [];
|
||||
this.has_more = body.has_more || false;
|
||||
this.next_page = body.next_page || null;
|
||||
}
|
||||
getPaginatedItems() {
|
||||
return this.data ?? [];
|
||||
}
|
||||
hasNextPage() {
|
||||
if (this.has_more === false) {
|
||||
return false;
|
||||
}
|
||||
return super.hasNextPage();
|
||||
}
|
||||
nextPageRequestOptions() {
|
||||
const cursor = this.next_page;
|
||||
if (!cursor) {
|
||||
return null;
|
||||
}
|
||||
return {
|
||||
...this.options,
|
||||
query: {
|
||||
...maybeObj(this.options.query),
|
||||
page: cursor,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=pagination.mjs.map
|
||||
7
extracted-source/node_modules/@anthropic-ai/sdk/core/resource.mjs
generated
vendored
7
extracted-source/node_modules/@anthropic-ai/sdk/core/resource.mjs
generated
vendored
@@ -1,7 +0,0 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
export class APIResource {
|
||||
constructor(client) {
|
||||
this._client = client;
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=resource.mjs.map
|
||||
283
extracted-source/node_modules/@anthropic-ai/sdk/core/streaming.mjs
generated
vendored
283
extracted-source/node_modules/@anthropic-ai/sdk/core/streaming.mjs
generated
vendored
@@ -1,283 +0,0 @@
|
||||
var _Stream_client;
|
||||
import { __classPrivateFieldGet, __classPrivateFieldSet } from "../internal/tslib.mjs";
|
||||
import { AnthropicError } from "./error.mjs";
|
||||
import { makeReadableStream } from "../internal/shims.mjs";
|
||||
import { findDoubleNewlineIndex, LineDecoder } from "../internal/decoders/line.mjs";
|
||||
import { ReadableStreamToAsyncIterable } from "../internal/shims.mjs";
|
||||
import { isAbortError } from "../internal/errors.mjs";
|
||||
import { safeJSON } from "../internal/utils/values.mjs";
|
||||
import { encodeUTF8 } from "../internal/utils/bytes.mjs";
|
||||
import { loggerFor } from "../internal/utils/log.mjs";
|
||||
import { APIError } from "./error.mjs";
|
||||
export class Stream {
|
||||
constructor(iterator, controller, client) {
|
||||
this.iterator = iterator;
|
||||
_Stream_client.set(this, void 0);
|
||||
this.controller = controller;
|
||||
__classPrivateFieldSet(this, _Stream_client, client, "f");
|
||||
}
|
||||
static fromSSEResponse(response, controller, client) {
|
||||
let consumed = false;
|
||||
const logger = client ? loggerFor(client) : console;
|
||||
async function* iterator() {
|
||||
if (consumed) {
|
||||
throw new AnthropicError('Cannot iterate over a consumed stream, use `.tee()` to split the stream.');
|
||||
}
|
||||
consumed = true;
|
||||
let done = false;
|
||||
try {
|
||||
for await (const sse of _iterSSEMessages(response, controller)) {
|
||||
if (sse.event === 'completion') {
|
||||
try {
|
||||
yield JSON.parse(sse.data);
|
||||
}
|
||||
catch (e) {
|
||||
logger.error(`Could not parse message into JSON:`, sse.data);
|
||||
logger.error(`From chunk:`, sse.raw);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
if (sse.event === 'message_start' ||
|
||||
sse.event === 'message_delta' ||
|
||||
sse.event === 'message_stop' ||
|
||||
sse.event === 'content_block_start' ||
|
||||
sse.event === 'content_block_delta' ||
|
||||
sse.event === 'content_block_stop') {
|
||||
try {
|
||||
yield JSON.parse(sse.data);
|
||||
}
|
||||
catch (e) {
|
||||
logger.error(`Could not parse message into JSON:`, sse.data);
|
||||
logger.error(`From chunk:`, sse.raw);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
if (sse.event === 'ping') {
|
||||
continue;
|
||||
}
|
||||
if (sse.event === 'error') {
|
||||
throw new APIError(undefined, safeJSON(sse.data) ?? sse.data, undefined, response.headers);
|
||||
}
|
||||
}
|
||||
done = true;
|
||||
}
|
||||
catch (e) {
|
||||
// If the user calls `stream.controller.abort()`, we should exit without throwing.
|
||||
if (isAbortError(e))
|
||||
return;
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
// If the user `break`s, abort the ongoing request.
|
||||
if (!done)
|
||||
controller.abort();
|
||||
}
|
||||
}
|
||||
return new Stream(iterator, controller, client);
|
||||
}
|
||||
/**
|
||||
* Generates a Stream from a newline-separated ReadableStream
|
||||
* where each item is a JSON value.
|
||||
*/
|
||||
static fromReadableStream(readableStream, controller, client) {
|
||||
let consumed = false;
|
||||
async function* iterLines() {
|
||||
const lineDecoder = new LineDecoder();
|
||||
const iter = ReadableStreamToAsyncIterable(readableStream);
|
||||
for await (const chunk of iter) {
|
||||
for (const line of lineDecoder.decode(chunk)) {
|
||||
yield line;
|
||||
}
|
||||
}
|
||||
for (const line of lineDecoder.flush()) {
|
||||
yield line;
|
||||
}
|
||||
}
|
||||
async function* iterator() {
|
||||
if (consumed) {
|
||||
throw new AnthropicError('Cannot iterate over a consumed stream, use `.tee()` to split the stream.');
|
||||
}
|
||||
consumed = true;
|
||||
let done = false;
|
||||
try {
|
||||
for await (const line of iterLines()) {
|
||||
if (done)
|
||||
continue;
|
||||
if (line)
|
||||
yield JSON.parse(line);
|
||||
}
|
||||
done = true;
|
||||
}
|
||||
catch (e) {
|
||||
// If the user calls `stream.controller.abort()`, we should exit without throwing.
|
||||
if (isAbortError(e))
|
||||
return;
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
// If the user `break`s, abort the ongoing request.
|
||||
if (!done)
|
||||
controller.abort();
|
||||
}
|
||||
}
|
||||
return new Stream(iterator, controller, client);
|
||||
}
|
||||
[(_Stream_client = new WeakMap(), Symbol.asyncIterator)]() {
|
||||
return this.iterator();
|
||||
}
|
||||
/**
|
||||
* Splits the stream into two streams which can be
|
||||
* independently read from at different speeds.
|
||||
*/
|
||||
tee() {
|
||||
const left = [];
|
||||
const right = [];
|
||||
const iterator = this.iterator();
|
||||
const teeIterator = (queue) => {
|
||||
return {
|
||||
next: () => {
|
||||
if (queue.length === 0) {
|
||||
const result = iterator.next();
|
||||
left.push(result);
|
||||
right.push(result);
|
||||
}
|
||||
return queue.shift();
|
||||
},
|
||||
};
|
||||
};
|
||||
return [
|
||||
new Stream(() => teeIterator(left), this.controller, __classPrivateFieldGet(this, _Stream_client, "f")),
|
||||
new Stream(() => teeIterator(right), this.controller, __classPrivateFieldGet(this, _Stream_client, "f")),
|
||||
];
|
||||
}
|
||||
/**
|
||||
* Converts this stream to a newline-separated ReadableStream of
|
||||
* JSON stringified values in the stream
|
||||
* which can be turned back into a Stream with `Stream.fromReadableStream()`.
|
||||
*/
|
||||
toReadableStream() {
|
||||
const self = this;
|
||||
let iter;
|
||||
return makeReadableStream({
|
||||
async start() {
|
||||
iter = self[Symbol.asyncIterator]();
|
||||
},
|
||||
async pull(ctrl) {
|
||||
try {
|
||||
const { value, done } = await iter.next();
|
||||
if (done)
|
||||
return ctrl.close();
|
||||
const bytes = encodeUTF8(JSON.stringify(value) + '\n');
|
||||
ctrl.enqueue(bytes);
|
||||
}
|
||||
catch (err) {
|
||||
ctrl.error(err);
|
||||
}
|
||||
},
|
||||
async cancel() {
|
||||
await iter.return?.();
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
export async function* _iterSSEMessages(response, controller) {
|
||||
if (!response.body) {
|
||||
controller.abort();
|
||||
if (typeof globalThis.navigator !== 'undefined' &&
|
||||
globalThis.navigator.product === 'ReactNative') {
|
||||
throw new AnthropicError(`The default react-native fetch implementation does not support streaming. Please use expo/fetch: https://docs.expo.dev/versions/latest/sdk/expo/#expofetch-api`);
|
||||
}
|
||||
throw new AnthropicError(`Attempted to iterate over a response with no body`);
|
||||
}
|
||||
const sseDecoder = new SSEDecoder();
|
||||
const lineDecoder = new LineDecoder();
|
||||
const iter = ReadableStreamToAsyncIterable(response.body);
|
||||
for await (const sseChunk of iterSSEChunks(iter)) {
|
||||
for (const line of lineDecoder.decode(sseChunk)) {
|
||||
const sse = sseDecoder.decode(line);
|
||||
if (sse)
|
||||
yield sse;
|
||||
}
|
||||
}
|
||||
for (const line of lineDecoder.flush()) {
|
||||
const sse = sseDecoder.decode(line);
|
||||
if (sse)
|
||||
yield sse;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Given an async iterable iterator, iterates over it and yields full
|
||||
* SSE chunks, i.e. yields when a double new-line is encountered.
|
||||
*/
|
||||
async function* iterSSEChunks(iterator) {
|
||||
let data = new Uint8Array();
|
||||
for await (const chunk of iterator) {
|
||||
if (chunk == null) {
|
||||
continue;
|
||||
}
|
||||
const binaryChunk = chunk instanceof ArrayBuffer ? new Uint8Array(chunk)
|
||||
: typeof chunk === 'string' ? encodeUTF8(chunk)
|
||||
: chunk;
|
||||
let newData = new Uint8Array(data.length + binaryChunk.length);
|
||||
newData.set(data);
|
||||
newData.set(binaryChunk, data.length);
|
||||
data = newData;
|
||||
let patternIndex;
|
||||
while ((patternIndex = findDoubleNewlineIndex(data)) !== -1) {
|
||||
yield data.slice(0, patternIndex);
|
||||
data = data.slice(patternIndex);
|
||||
}
|
||||
}
|
||||
if (data.length > 0) {
|
||||
yield data;
|
||||
}
|
||||
}
|
||||
class SSEDecoder {
|
||||
constructor() {
|
||||
this.event = null;
|
||||
this.data = [];
|
||||
this.chunks = [];
|
||||
}
|
||||
decode(line) {
|
||||
if (line.endsWith('\r')) {
|
||||
line = line.substring(0, line.length - 1);
|
||||
}
|
||||
if (!line) {
|
||||
// empty line and we didn't previously encounter any messages
|
||||
if (!this.event && !this.data.length)
|
||||
return null;
|
||||
const sse = {
|
||||
event: this.event,
|
||||
data: this.data.join('\n'),
|
||||
raw: this.chunks,
|
||||
};
|
||||
this.event = null;
|
||||
this.data = [];
|
||||
this.chunks = [];
|
||||
return sse;
|
||||
}
|
||||
this.chunks.push(line);
|
||||
if (line.startsWith(':')) {
|
||||
return null;
|
||||
}
|
||||
let [fieldname, _, value] = partition(line, ':');
|
||||
if (value.startsWith(' ')) {
|
||||
value = value.substring(1);
|
||||
}
|
||||
if (fieldname === 'event') {
|
||||
this.event = value;
|
||||
}
|
||||
else if (fieldname === 'data') {
|
||||
this.data.push(value);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
function partition(str, delimiter) {
|
||||
const index = str.indexOf(delimiter);
|
||||
if (index !== -1) {
|
||||
return [str.substring(0, index), delimiter, str.substring(index + delimiter.length)];
|
||||
}
|
||||
return [str, '', ''];
|
||||
}
|
||||
//# sourceMappingURL=streaming.mjs.map
|
||||
2
extracted-source/node_modules/@anthropic-ai/sdk/core/uploads.mjs
generated
vendored
2
extracted-source/node_modules/@anthropic-ai/sdk/core/uploads.mjs
generated
vendored
@@ -1,2 +0,0 @@
|
||||
export { toFile } from "../internal/to-file.mjs";
|
||||
//# sourceMappingURL=uploads.mjs.map
|
||||
2
extracted-source/node_modules/@anthropic-ai/sdk/error.mjs
generated
vendored
2
extracted-source/node_modules/@anthropic-ai/sdk/error.mjs
generated
vendored
@@ -1,2 +0,0 @@
|
||||
export * from "./core/error.mjs";
|
||||
//# sourceMappingURL=error.mjs.map
|
||||
8
extracted-source/node_modules/@anthropic-ai/sdk/index.mjs
generated
vendored
8
extracted-source/node_modules/@anthropic-ai/sdk/index.mjs
generated
vendored
@@ -1,8 +0,0 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
export { Anthropic as default } from "./client.mjs";
|
||||
export { toFile } from "./core/uploads.mjs";
|
||||
export { APIPromise } from "./core/api-promise.mjs";
|
||||
export { BaseAnthropic, Anthropic, HUMAN_PROMPT, AI_PROMPT } from "./client.mjs";
|
||||
export { PagePromise } from "./core/pagination.mjs";
|
||||
export { AnthropicError, APIError, APIConnectionError, APIConnectionTimeoutError, APIUserAbortError, NotFoundError, ConflictError, RateLimitError, BadRequestError, AuthenticationError, InternalServerError, PermissionDeniedError, UnprocessableEntityError, } from "./core/error.mjs";
|
||||
//# sourceMappingURL=index.mjs.map
|
||||
15
extracted-source/node_modules/@anthropic-ai/sdk/internal/constants.mjs
generated
vendored
15
extracted-source/node_modules/@anthropic-ai/sdk/internal/constants.mjs
generated
vendored
@@ -1,15 +0,0 @@
|
||||
// File containing shared constants
|
||||
/**
|
||||
* Model-specific timeout constraints for non-streaming requests
|
||||
*/
|
||||
export const MODEL_NONSTREAMING_TOKENS = {
|
||||
'claude-opus-4-20250514': 8192,
|
||||
'claude-opus-4-0': 8192,
|
||||
'claude-4-opus-20250514': 8192,
|
||||
'anthropic.claude-opus-4-20250514-v1:0': 8192,
|
||||
'claude-opus-4@20250514': 8192,
|
||||
'claude-opus-4-1-20250805': 8192,
|
||||
'anthropic.claude-opus-4-1-20250805-v1:0': 8192,
|
||||
'claude-opus-4-1@20250805': 8192,
|
||||
};
|
||||
//# sourceMappingURL=constants.mjs.map
|
||||
35
extracted-source/node_modules/@anthropic-ai/sdk/internal/decoders/jsonl.mjs
generated
vendored
35
extracted-source/node_modules/@anthropic-ai/sdk/internal/decoders/jsonl.mjs
generated
vendored
@@ -1,35 +0,0 @@
|
||||
import { AnthropicError } from "../../core/error.mjs";
|
||||
import { ReadableStreamToAsyncIterable } from "../shims.mjs";
|
||||
import { LineDecoder } from "./line.mjs";
|
||||
export class JSONLDecoder {
|
||||
constructor(iterator, controller) {
|
||||
this.iterator = iterator;
|
||||
this.controller = controller;
|
||||
}
|
||||
async *decoder() {
|
||||
const lineDecoder = new LineDecoder();
|
||||
for await (const chunk of this.iterator) {
|
||||
for (const line of lineDecoder.decode(chunk)) {
|
||||
yield JSON.parse(line);
|
||||
}
|
||||
}
|
||||
for (const line of lineDecoder.flush()) {
|
||||
yield JSON.parse(line);
|
||||
}
|
||||
}
|
||||
[Symbol.asyncIterator]() {
|
||||
return this.decoder();
|
||||
}
|
||||
static fromResponse(response, controller) {
|
||||
if (!response.body) {
|
||||
controller.abort();
|
||||
if (typeof globalThis.navigator !== 'undefined' &&
|
||||
globalThis.navigator.product === 'ReactNative') {
|
||||
throw new AnthropicError(`The default react-native fetch implementation does not support streaming. Please use expo/fetch: https://docs.expo.dev/versions/latest/sdk/expo/#expofetch-api`);
|
||||
}
|
||||
throw new AnthropicError(`Attempted to iterate over a response with no body`);
|
||||
}
|
||||
return new JSONLDecoder(ReadableStreamToAsyncIterable(response.body), controller);
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=jsonl.mjs.map
|
||||
108
extracted-source/node_modules/@anthropic-ai/sdk/internal/decoders/line.mjs
generated
vendored
108
extracted-source/node_modules/@anthropic-ai/sdk/internal/decoders/line.mjs
generated
vendored
@@ -1,108 +0,0 @@
|
||||
var _LineDecoder_buffer, _LineDecoder_carriageReturnIndex;
|
||||
import { __classPrivateFieldGet, __classPrivateFieldSet } from "../tslib.mjs";
|
||||
import { concatBytes, decodeUTF8, encodeUTF8 } from "../utils/bytes.mjs";
|
||||
/**
|
||||
* A re-implementation of httpx's `LineDecoder` in Python that handles incrementally
|
||||
* reading lines from text.
|
||||
*
|
||||
* https://github.com/encode/httpx/blob/920333ea98118e9cf617f246905d7b202510941c/httpx/_decoders.py#L258
|
||||
*/
|
||||
export class LineDecoder {
|
||||
constructor() {
|
||||
_LineDecoder_buffer.set(this, void 0);
|
||||
_LineDecoder_carriageReturnIndex.set(this, void 0);
|
||||
__classPrivateFieldSet(this, _LineDecoder_buffer, new Uint8Array(), "f");
|
||||
__classPrivateFieldSet(this, _LineDecoder_carriageReturnIndex, null, "f");
|
||||
}
|
||||
decode(chunk) {
|
||||
if (chunk == null) {
|
||||
return [];
|
||||
}
|
||||
const binaryChunk = chunk instanceof ArrayBuffer ? new Uint8Array(chunk)
|
||||
: typeof chunk === 'string' ? encodeUTF8(chunk)
|
||||
: chunk;
|
||||
__classPrivateFieldSet(this, _LineDecoder_buffer, concatBytes([__classPrivateFieldGet(this, _LineDecoder_buffer, "f"), binaryChunk]), "f");
|
||||
const lines = [];
|
||||
let patternIndex;
|
||||
while ((patternIndex = findNewlineIndex(__classPrivateFieldGet(this, _LineDecoder_buffer, "f"), __classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f"))) != null) {
|
||||
if (patternIndex.carriage && __classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f") == null) {
|
||||
// skip until we either get a corresponding `\n`, a new `\r` or nothing
|
||||
__classPrivateFieldSet(this, _LineDecoder_carriageReturnIndex, patternIndex.index, "f");
|
||||
continue;
|
||||
}
|
||||
// we got double \r or \rtext\n
|
||||
if (__classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f") != null &&
|
||||
(patternIndex.index !== __classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f") + 1 || patternIndex.carriage)) {
|
||||
lines.push(decodeUTF8(__classPrivateFieldGet(this, _LineDecoder_buffer, "f").subarray(0, __classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f") - 1)));
|
||||
__classPrivateFieldSet(this, _LineDecoder_buffer, __classPrivateFieldGet(this, _LineDecoder_buffer, "f").subarray(__classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f")), "f");
|
||||
__classPrivateFieldSet(this, _LineDecoder_carriageReturnIndex, null, "f");
|
||||
continue;
|
||||
}
|
||||
const endIndex = __classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f") !== null ? patternIndex.preceding - 1 : patternIndex.preceding;
|
||||
const line = decodeUTF8(__classPrivateFieldGet(this, _LineDecoder_buffer, "f").subarray(0, endIndex));
|
||||
lines.push(line);
|
||||
__classPrivateFieldSet(this, _LineDecoder_buffer, __classPrivateFieldGet(this, _LineDecoder_buffer, "f").subarray(patternIndex.index), "f");
|
||||
__classPrivateFieldSet(this, _LineDecoder_carriageReturnIndex, null, "f");
|
||||
}
|
||||
return lines;
|
||||
}
|
||||
flush() {
|
||||
if (!__classPrivateFieldGet(this, _LineDecoder_buffer, "f").length) {
|
||||
return [];
|
||||
}
|
||||
return this.decode('\n');
|
||||
}
|
||||
}
|
||||
_LineDecoder_buffer = new WeakMap(), _LineDecoder_carriageReturnIndex = new WeakMap();
|
||||
// prettier-ignore
|
||||
LineDecoder.NEWLINE_CHARS = new Set(['\n', '\r']);
|
||||
LineDecoder.NEWLINE_REGEXP = /\r\n|[\n\r]/g;
|
||||
/**
|
||||
* This function searches the buffer for the end patterns, (\r or \n)
|
||||
* and returns an object with the index preceding the matched newline and the
|
||||
* index after the newline char. `null` is returned if no new line is found.
|
||||
*
|
||||
* ```ts
|
||||
* findNewLineIndex('abc\ndef') -> { preceding: 2, index: 3 }
|
||||
* ```
|
||||
*/
|
||||
function findNewlineIndex(buffer, startIndex) {
|
||||
const newline = 0x0a; // \n
|
||||
const carriage = 0x0d; // \r
|
||||
for (let i = startIndex ?? 0; i < buffer.length; i++) {
|
||||
if (buffer[i] === newline) {
|
||||
return { preceding: i, index: i + 1, carriage: false };
|
||||
}
|
||||
if (buffer[i] === carriage) {
|
||||
return { preceding: i, index: i + 1, carriage: true };
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
export function findDoubleNewlineIndex(buffer) {
|
||||
// This function searches the buffer for the end patterns (\r\r, \n\n, \r\n\r\n)
|
||||
// and returns the index right after the first occurrence of any pattern,
|
||||
// or -1 if none of the patterns are found.
|
||||
const newline = 0x0a; // \n
|
||||
const carriage = 0x0d; // \r
|
||||
for (let i = 0; i < buffer.length - 1; i++) {
|
||||
if (buffer[i] === newline && buffer[i + 1] === newline) {
|
||||
// \n\n
|
||||
return i + 2;
|
||||
}
|
||||
if (buffer[i] === carriage && buffer[i + 1] === carriage) {
|
||||
// \r\r
|
||||
return i + 2;
|
||||
}
|
||||
if (buffer[i] === carriage &&
|
||||
buffer[i + 1] === newline &&
|
||||
i + 3 < buffer.length &&
|
||||
buffer[i + 2] === carriage &&
|
||||
buffer[i + 3] === newline) {
|
||||
// \r\n\r\n
|
||||
return i + 4;
|
||||
}
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
//# sourceMappingURL=line.mjs.map
|
||||
157
extracted-source/node_modules/@anthropic-ai/sdk/internal/detect-platform.mjs
generated
vendored
157
extracted-source/node_modules/@anthropic-ai/sdk/internal/detect-platform.mjs
generated
vendored
@@ -1,157 +0,0 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { VERSION } from "../version.mjs";
|
||||
export const isRunningInBrowser = () => {
|
||||
return (
|
||||
// @ts-ignore
|
||||
typeof window !== 'undefined' &&
|
||||
// @ts-ignore
|
||||
typeof window.document !== 'undefined' &&
|
||||
// @ts-ignore
|
||||
typeof navigator !== 'undefined');
|
||||
};
|
||||
/**
|
||||
* Note this does not detect 'browser'; for that, use getBrowserInfo().
|
||||
*/
|
||||
function getDetectedPlatform() {
|
||||
if (typeof Deno !== 'undefined' && Deno.build != null) {
|
||||
return 'deno';
|
||||
}
|
||||
if (typeof EdgeRuntime !== 'undefined') {
|
||||
return 'edge';
|
||||
}
|
||||
if (Object.prototype.toString.call(typeof globalThis.process !== 'undefined' ? globalThis.process : 0) === '[object process]') {
|
||||
return 'node';
|
||||
}
|
||||
return 'unknown';
|
||||
}
|
||||
const getPlatformProperties = () => {
|
||||
const detectedPlatform = getDetectedPlatform();
|
||||
if (detectedPlatform === 'deno') {
|
||||
return {
|
||||
'X-Stainless-Lang': 'js',
|
||||
'X-Stainless-Package-Version': VERSION,
|
||||
'X-Stainless-OS': normalizePlatform(Deno.build.os),
|
||||
'X-Stainless-Arch': normalizeArch(Deno.build.arch),
|
||||
'X-Stainless-Runtime': 'deno',
|
||||
'X-Stainless-Runtime-Version': typeof Deno.version === 'string' ? Deno.version : Deno.version?.deno ?? 'unknown',
|
||||
};
|
||||
}
|
||||
if (typeof EdgeRuntime !== 'undefined') {
|
||||
return {
|
||||
'X-Stainless-Lang': 'js',
|
||||
'X-Stainless-Package-Version': VERSION,
|
||||
'X-Stainless-OS': 'Unknown',
|
||||
'X-Stainless-Arch': `other:${EdgeRuntime}`,
|
||||
'X-Stainless-Runtime': 'edge',
|
||||
'X-Stainless-Runtime-Version': globalThis.process.version,
|
||||
};
|
||||
}
|
||||
// Check if Node.js
|
||||
if (detectedPlatform === 'node') {
|
||||
return {
|
||||
'X-Stainless-Lang': 'js',
|
||||
'X-Stainless-Package-Version': VERSION,
|
||||
'X-Stainless-OS': normalizePlatform(globalThis.process.platform ?? 'unknown'),
|
||||
'X-Stainless-Arch': normalizeArch(globalThis.process.arch ?? 'unknown'),
|
||||
'X-Stainless-Runtime': 'node',
|
||||
'X-Stainless-Runtime-Version': globalThis.process.version ?? 'unknown',
|
||||
};
|
||||
}
|
||||
const browserInfo = getBrowserInfo();
|
||||
if (browserInfo) {
|
||||
return {
|
||||
'X-Stainless-Lang': 'js',
|
||||
'X-Stainless-Package-Version': VERSION,
|
||||
'X-Stainless-OS': 'Unknown',
|
||||
'X-Stainless-Arch': 'unknown',
|
||||
'X-Stainless-Runtime': `browser:${browserInfo.browser}`,
|
||||
'X-Stainless-Runtime-Version': browserInfo.version,
|
||||
};
|
||||
}
|
||||
// TODO add support for Cloudflare workers, etc.
|
||||
return {
|
||||
'X-Stainless-Lang': 'js',
|
||||
'X-Stainless-Package-Version': VERSION,
|
||||
'X-Stainless-OS': 'Unknown',
|
||||
'X-Stainless-Arch': 'unknown',
|
||||
'X-Stainless-Runtime': 'unknown',
|
||||
'X-Stainless-Runtime-Version': 'unknown',
|
||||
};
|
||||
};
|
||||
// Note: modified from https://github.com/JS-DevTools/host-environment/blob/b1ab79ecde37db5d6e163c050e54fe7d287d7c92/src/isomorphic.browser.ts
|
||||
function getBrowserInfo() {
|
||||
if (typeof navigator === 'undefined' || !navigator) {
|
||||
return null;
|
||||
}
|
||||
// NOTE: The order matters here!
|
||||
const browserPatterns = [
|
||||
{ key: 'edge', pattern: /Edge(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ },
|
||||
{ key: 'ie', pattern: /MSIE(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ },
|
||||
{ key: 'ie', pattern: /Trident(?:.*rv\:(\d+)\.(\d+)(?:\.(\d+))?)?/ },
|
||||
{ key: 'chrome', pattern: /Chrome(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ },
|
||||
{ key: 'firefox', pattern: /Firefox(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ },
|
||||
{ key: 'safari', pattern: /(?:Version\W+(\d+)\.(\d+)(?:\.(\d+))?)?(?:\W+Mobile\S*)?\W+Safari/ },
|
||||
];
|
||||
// Find the FIRST matching browser
|
||||
for (const { key, pattern } of browserPatterns) {
|
||||
const match = pattern.exec(navigator.userAgent);
|
||||
if (match) {
|
||||
const major = match[1] || 0;
|
||||
const minor = match[2] || 0;
|
||||
const patch = match[3] || 0;
|
||||
return { browser: key, version: `${major}.${minor}.${patch}` };
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
const normalizeArch = (arch) => {
|
||||
// Node docs:
|
||||
// - https://nodejs.org/api/process.html#processarch
|
||||
// Deno docs:
|
||||
// - https://doc.deno.land/deno/stable/~/Deno.build
|
||||
if (arch === 'x32')
|
||||
return 'x32';
|
||||
if (arch === 'x86_64' || arch === 'x64')
|
||||
return 'x64';
|
||||
if (arch === 'arm')
|
||||
return 'arm';
|
||||
if (arch === 'aarch64' || arch === 'arm64')
|
||||
return 'arm64';
|
||||
if (arch)
|
||||
return `other:${arch}`;
|
||||
return 'unknown';
|
||||
};
|
||||
const normalizePlatform = (platform) => {
|
||||
// Node platforms:
|
||||
// - https://nodejs.org/api/process.html#processplatform
|
||||
// Deno platforms:
|
||||
// - https://doc.deno.land/deno/stable/~/Deno.build
|
||||
// - https://github.com/denoland/deno/issues/14799
|
||||
platform = platform.toLowerCase();
|
||||
// NOTE: this iOS check is untested and may not work
|
||||
// Node does not work natively on IOS, there is a fork at
|
||||
// https://github.com/nodejs-mobile/nodejs-mobile
|
||||
// however it is unknown at the time of writing how to detect if it is running
|
||||
if (platform.includes('ios'))
|
||||
return 'iOS';
|
||||
if (platform === 'android')
|
||||
return 'Android';
|
||||
if (platform === 'darwin')
|
||||
return 'MacOS';
|
||||
if (platform === 'win32')
|
||||
return 'Windows';
|
||||
if (platform === 'freebsd')
|
||||
return 'FreeBSD';
|
||||
if (platform === 'openbsd')
|
||||
return 'OpenBSD';
|
||||
if (platform === 'linux')
|
||||
return 'Linux';
|
||||
if (platform)
|
||||
return `Other:${platform}`;
|
||||
return 'Unknown';
|
||||
};
|
||||
let _platformHeaders;
|
||||
export const getPlatformHeaders = () => {
|
||||
return (_platformHeaders ?? (_platformHeaders = getPlatformProperties()));
|
||||
};
|
||||
//# sourceMappingURL=detect-platform.mjs.map
|
||||
36
extracted-source/node_modules/@anthropic-ai/sdk/internal/errors.mjs
generated
vendored
36
extracted-source/node_modules/@anthropic-ai/sdk/internal/errors.mjs
generated
vendored
@@ -1,36 +0,0 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
export function isAbortError(err) {
|
||||
return (typeof err === 'object' &&
|
||||
err !== null &&
|
||||
// Spec-compliant fetch implementations
|
||||
(('name' in err && err.name === 'AbortError') ||
|
||||
// Expo fetch
|
||||
('message' in err && String(err.message).includes('FetchRequestCanceledException'))));
|
||||
}
|
||||
export const castToError = (err) => {
|
||||
if (err instanceof Error)
|
||||
return err;
|
||||
if (typeof err === 'object' && err !== null) {
|
||||
try {
|
||||
if (Object.prototype.toString.call(err) === '[object Error]') {
|
||||
// @ts-ignore - not all envs have native support for cause yet
|
||||
const error = new Error(err.message, err.cause ? { cause: err.cause } : {});
|
||||
if (err.stack)
|
||||
error.stack = err.stack;
|
||||
// @ts-ignore - not all envs have native support for cause yet
|
||||
if (err.cause && !error.cause)
|
||||
error.cause = err.cause;
|
||||
if (err.name)
|
||||
error.name = err.name;
|
||||
return error;
|
||||
}
|
||||
}
|
||||
catch { }
|
||||
try {
|
||||
return new Error(JSON.stringify(err));
|
||||
}
|
||||
catch { }
|
||||
}
|
||||
return new Error(err);
|
||||
};
|
||||
//# sourceMappingURL=errors.mjs.map
|
||||
74
extracted-source/node_modules/@anthropic-ai/sdk/internal/headers.mjs
generated
vendored
74
extracted-source/node_modules/@anthropic-ai/sdk/internal/headers.mjs
generated
vendored
@@ -1,74 +0,0 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { isReadonlyArray } from "./utils/values.mjs";
|
||||
const brand_privateNullableHeaders = Symbol.for('brand.privateNullableHeaders');
|
||||
function* iterateHeaders(headers) {
|
||||
if (!headers)
|
||||
return;
|
||||
if (brand_privateNullableHeaders in headers) {
|
||||
const { values, nulls } = headers;
|
||||
yield* values.entries();
|
||||
for (const name of nulls) {
|
||||
yield [name, null];
|
||||
}
|
||||
return;
|
||||
}
|
||||
let shouldClear = false;
|
||||
let iter;
|
||||
if (headers instanceof Headers) {
|
||||
iter = headers.entries();
|
||||
}
|
||||
else if (isReadonlyArray(headers)) {
|
||||
iter = headers;
|
||||
}
|
||||
else {
|
||||
shouldClear = true;
|
||||
iter = Object.entries(headers ?? {});
|
||||
}
|
||||
for (let row of iter) {
|
||||
const name = row[0];
|
||||
if (typeof name !== 'string')
|
||||
throw new TypeError('expected header name to be a string');
|
||||
const values = isReadonlyArray(row[1]) ? row[1] : [row[1]];
|
||||
let didClear = false;
|
||||
for (const value of values) {
|
||||
if (value === undefined)
|
||||
continue;
|
||||
// Objects keys always overwrite older headers, they never append.
|
||||
// Yield a null to clear the header before adding the new values.
|
||||
if (shouldClear && !didClear) {
|
||||
didClear = true;
|
||||
yield [name, null];
|
||||
}
|
||||
yield [name, value];
|
||||
}
|
||||
}
|
||||
}
|
||||
export const buildHeaders = (newHeaders) => {
|
||||
const targetHeaders = new Headers();
|
||||
const nullHeaders = new Set();
|
||||
for (const headers of newHeaders) {
|
||||
const seenHeaders = new Set();
|
||||
for (const [name, value] of iterateHeaders(headers)) {
|
||||
const lowerName = name.toLowerCase();
|
||||
if (!seenHeaders.has(lowerName)) {
|
||||
targetHeaders.delete(name);
|
||||
seenHeaders.add(lowerName);
|
||||
}
|
||||
if (value === null) {
|
||||
targetHeaders.delete(name);
|
||||
nullHeaders.add(lowerName);
|
||||
}
|
||||
else {
|
||||
targetHeaders.append(name, value);
|
||||
nullHeaders.delete(lowerName);
|
||||
}
|
||||
}
|
||||
}
|
||||
return { [brand_privateNullableHeaders]: true, values: targetHeaders, nulls: nullHeaders };
|
||||
};
|
||||
export const isEmptyHeaders = (headers) => {
|
||||
for (const _ of iterateHeaders(headers))
|
||||
return false;
|
||||
return true;
|
||||
};
|
||||
//# sourceMappingURL=headers.mjs.map
|
||||
56
extracted-source/node_modules/@anthropic-ai/sdk/internal/parse.mjs
generated
vendored
56
extracted-source/node_modules/@anthropic-ai/sdk/internal/parse.mjs
generated
vendored
@@ -1,56 +0,0 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { Stream } from "../core/streaming.mjs";
|
||||
import { formatRequestDetails, loggerFor } from "./utils/log.mjs";
|
||||
export async function defaultParseResponse(client, props) {
|
||||
const { response, requestLogID, retryOfRequestLogID, startTime } = props;
|
||||
const body = await (async () => {
|
||||
if (props.options.stream) {
|
||||
loggerFor(client).debug('response', response.status, response.url, response.headers, response.body);
|
||||
// Note: there is an invariant here that isn't represented in the type system
|
||||
// that if you set `stream: true` the response type must also be `Stream<T>`
|
||||
if (props.options.__streamClass) {
|
||||
return props.options.__streamClass.fromSSEResponse(response, props.controller);
|
||||
}
|
||||
return Stream.fromSSEResponse(response, props.controller);
|
||||
}
|
||||
// fetch refuses to read the body when the status code is 204.
|
||||
if (response.status === 204) {
|
||||
return null;
|
||||
}
|
||||
if (props.options.__binaryResponse) {
|
||||
return response;
|
||||
}
|
||||
const contentType = response.headers.get('content-type');
|
||||
const mediaType = contentType?.split(';')[0]?.trim();
|
||||
const isJSON = mediaType?.includes('application/json') || mediaType?.endsWith('+json');
|
||||
if (isJSON) {
|
||||
const contentLength = response.headers.get('content-length');
|
||||
if (contentLength === '0') {
|
||||
// if there is no content we can't do anything
|
||||
return undefined;
|
||||
}
|
||||
const json = await response.json();
|
||||
return addRequestID(json, response);
|
||||
}
|
||||
const text = await response.text();
|
||||
return text;
|
||||
})();
|
||||
loggerFor(client).debug(`[${requestLogID}] response parsed`, formatRequestDetails({
|
||||
retryOfRequestLogID,
|
||||
url: response.url,
|
||||
status: response.status,
|
||||
body,
|
||||
durationMs: Date.now() - startTime,
|
||||
}));
|
||||
return body;
|
||||
}
|
||||
export function addRequestID(value, response) {
|
||||
if (!value || typeof value !== 'object' || Array.isArray(value)) {
|
||||
return value;
|
||||
}
|
||||
return Object.defineProperty(value, '_request_id', {
|
||||
value: response.headers.get('request-id'),
|
||||
enumerable: false,
|
||||
});
|
||||
}
|
||||
//# sourceMappingURL=parse.mjs.map
|
||||
10
extracted-source/node_modules/@anthropic-ai/sdk/internal/request-options.mjs
generated
vendored
10
extracted-source/node_modules/@anthropic-ai/sdk/internal/request-options.mjs
generated
vendored
@@ -1,10 +0,0 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
export const FallbackEncoder = ({ headers, body }) => {
|
||||
return {
|
||||
bodyHeaders: {
|
||||
'content-type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(body),
|
||||
};
|
||||
};
|
||||
//# sourceMappingURL=request-options.mjs.map
|
||||
85
extracted-source/node_modules/@anthropic-ai/sdk/internal/shims.mjs
generated
vendored
85
extracted-source/node_modules/@anthropic-ai/sdk/internal/shims.mjs
generated
vendored
@@ -1,85 +0,0 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
export function getDefaultFetch() {
|
||||
if (typeof fetch !== 'undefined') {
|
||||
return fetch;
|
||||
}
|
||||
throw new Error('`fetch` is not defined as a global; Either pass `fetch` to the client, `new Anthropic({ fetch })` or polyfill the global, `globalThis.fetch = fetch`');
|
||||
}
|
||||
export function makeReadableStream(...args) {
|
||||
const ReadableStream = globalThis.ReadableStream;
|
||||
if (typeof ReadableStream === 'undefined') {
|
||||
// Note: All of the platforms / runtimes we officially support already define
|
||||
// `ReadableStream` as a global, so this should only ever be hit on unsupported runtimes.
|
||||
throw new Error('`ReadableStream` is not defined as a global; You will need to polyfill it, `globalThis.ReadableStream = ReadableStream`');
|
||||
}
|
||||
return new ReadableStream(...args);
|
||||
}
|
||||
export function ReadableStreamFrom(iterable) {
|
||||
let iter = Symbol.asyncIterator in iterable ? iterable[Symbol.asyncIterator]() : iterable[Symbol.iterator]();
|
||||
return makeReadableStream({
|
||||
start() { },
|
||||
async pull(controller) {
|
||||
const { done, value } = await iter.next();
|
||||
if (done) {
|
||||
controller.close();
|
||||
}
|
||||
else {
|
||||
controller.enqueue(value);
|
||||
}
|
||||
},
|
||||
async cancel() {
|
||||
await iter.return?.();
|
||||
},
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Most browsers don't yet have async iterable support for ReadableStream,
|
||||
* and Node has a very different way of reading bytes from its "ReadableStream".
|
||||
*
|
||||
* This polyfill was pulled from https://github.com/MattiasBuelens/web-streams-polyfill/pull/122#issuecomment-1627354490
|
||||
*/
|
||||
export function ReadableStreamToAsyncIterable(stream) {
|
||||
if (stream[Symbol.asyncIterator])
|
||||
return stream;
|
||||
const reader = stream.getReader();
|
||||
return {
|
||||
async next() {
|
||||
try {
|
||||
const result = await reader.read();
|
||||
if (result?.done)
|
||||
reader.releaseLock(); // release lock when stream becomes closed
|
||||
return result;
|
||||
}
|
||||
catch (e) {
|
||||
reader.releaseLock(); // release lock when stream becomes errored
|
||||
throw e;
|
||||
}
|
||||
},
|
||||
async return() {
|
||||
const cancelPromise = reader.cancel();
|
||||
reader.releaseLock();
|
||||
await cancelPromise;
|
||||
return { done: true, value: undefined };
|
||||
},
|
||||
[Symbol.asyncIterator]() {
|
||||
return this;
|
||||
},
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Cancels a ReadableStream we don't need to consume.
|
||||
* See https://undici.nodejs.org/#/?id=garbage-collection
|
||||
*/
|
||||
export async function CancelReadableStream(stream) {
|
||||
if (stream === null || typeof stream !== 'object')
|
||||
return;
|
||||
if (stream[Symbol.asyncIterator]) {
|
||||
await stream[Symbol.asyncIterator]().return?.();
|
||||
return;
|
||||
}
|
||||
const reader = stream.getReader();
|
||||
const cancelPromise = reader.cancel();
|
||||
reader.releaseLock();
|
||||
await cancelPromise;
|
||||
}
|
||||
//# sourceMappingURL=shims.mjs.map
|
||||
93
extracted-source/node_modules/@anthropic-ai/sdk/internal/to-file.mjs
generated
vendored
93
extracted-source/node_modules/@anthropic-ai/sdk/internal/to-file.mjs
generated
vendored
@@ -1,93 +0,0 @@
|
||||
import { getName, makeFile, isAsyncIterable } from "./uploads.mjs";
|
||||
import { checkFileSupport } from "./uploads.mjs";
|
||||
/**
|
||||
* This check adds the arrayBuffer() method type because it is available and used at runtime
|
||||
*/
|
||||
const isBlobLike = (value) => value != null &&
|
||||
typeof value === 'object' &&
|
||||
typeof value.size === 'number' &&
|
||||
typeof value.type === 'string' &&
|
||||
typeof value.text === 'function' &&
|
||||
typeof value.slice === 'function' &&
|
||||
typeof value.arrayBuffer === 'function';
|
||||
/**
|
||||
* This check adds the arrayBuffer() method type because it is available and used at runtime
|
||||
*/
|
||||
const isFileLike = (value) => value != null &&
|
||||
typeof value === 'object' &&
|
||||
typeof value.name === 'string' &&
|
||||
typeof value.lastModified === 'number' &&
|
||||
isBlobLike(value);
|
||||
const isResponseLike = (value) => value != null &&
|
||||
typeof value === 'object' &&
|
||||
typeof value.url === 'string' &&
|
||||
typeof value.blob === 'function';
|
||||
/**
|
||||
* Helper for creating a {@link File} to pass to an SDK upload method from a variety of different data formats
|
||||
* @param value the raw content of the file. Can be an {@link Uploadable}, BlobLikePart, or AsyncIterable of BlobLikeParts
|
||||
* @param {string=} name the name of the file. If omitted, toFile will try to determine a file name from bits if possible
|
||||
* @param {Object=} options additional properties
|
||||
* @param {string=} options.type the MIME type of the content
|
||||
* @param {number=} options.lastModified the last modified timestamp
|
||||
* @returns a {@link File} with the given properties
|
||||
*/
|
||||
export async function toFile(value, name, options) {
|
||||
checkFileSupport();
|
||||
// If it's a promise, resolve it.
|
||||
value = await value;
|
||||
name || (name = getName(value, true));
|
||||
// If we've been given a `File` we don't need to do anything if the name / options
|
||||
// have not been customised.
|
||||
if (isFileLike(value)) {
|
||||
if (value instanceof File && name == null && options == null) {
|
||||
return value;
|
||||
}
|
||||
return makeFile([await value.arrayBuffer()], name ?? value.name, {
|
||||
type: value.type,
|
||||
lastModified: value.lastModified,
|
||||
...options,
|
||||
});
|
||||
}
|
||||
if (isResponseLike(value)) {
|
||||
const blob = await value.blob();
|
||||
name || (name = new URL(value.url).pathname.split(/[\\/]/).pop());
|
||||
return makeFile(await getBytes(blob), name, options);
|
||||
}
|
||||
const parts = await getBytes(value);
|
||||
if (!options?.type) {
|
||||
const type = parts.find((part) => typeof part === 'object' && 'type' in part && part.type);
|
||||
if (typeof type === 'string') {
|
||||
options = { ...options, type };
|
||||
}
|
||||
}
|
||||
return makeFile(parts, name, options);
|
||||
}
|
||||
async function getBytes(value) {
|
||||
let parts = [];
|
||||
if (typeof value === 'string' ||
|
||||
ArrayBuffer.isView(value) || // includes Uint8Array, Buffer, etc.
|
||||
value instanceof ArrayBuffer) {
|
||||
parts.push(value);
|
||||
}
|
||||
else if (isBlobLike(value)) {
|
||||
parts.push(value instanceof Blob ? value : await value.arrayBuffer());
|
||||
}
|
||||
else if (isAsyncIterable(value) // includes Readable, ReadableStream, etc.
|
||||
) {
|
||||
for await (const chunk of value) {
|
||||
parts.push(...(await getBytes(chunk))); // TODO, consider validating?
|
||||
}
|
||||
}
|
||||
else {
|
||||
const constructor = value?.constructor?.name;
|
||||
throw new Error(`Unexpected data type: ${typeof value}${constructor ? `; constructor: ${constructor}` : ''}${propsForError(value)}`);
|
||||
}
|
||||
return parts;
|
||||
}
|
||||
function propsForError(value) {
|
||||
if (typeof value !== 'object' || value === null)
|
||||
return '';
|
||||
const props = Object.getOwnPropertyNames(value);
|
||||
return `; props: [${props.map((p) => `"${p}"`).join(', ')}]`;
|
||||
}
|
||||
//# sourceMappingURL=to-file.mjs.map
|
||||
17
extracted-source/node_modules/@anthropic-ai/sdk/internal/tslib.mjs
generated
vendored
17
extracted-source/node_modules/@anthropic-ai/sdk/internal/tslib.mjs
generated
vendored
@@ -1,17 +0,0 @@
|
||||
function __classPrivateFieldSet(receiver, state, value, kind, f) {
|
||||
if (kind === "m")
|
||||
throw new TypeError("Private method is not writable");
|
||||
if (kind === "a" && !f)
|
||||
throw new TypeError("Private accessor was defined without a setter");
|
||||
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver))
|
||||
throw new TypeError("Cannot write private member to an object whose class did not declare it");
|
||||
return kind === "a" ? f.call(receiver, value) : f ? (f.value = value) : state.set(receiver, value), value;
|
||||
}
|
||||
function __classPrivateFieldGet(receiver, state, kind, f) {
|
||||
if (kind === "a" && !f)
|
||||
throw new TypeError("Private accessor was defined without a getter");
|
||||
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver))
|
||||
throw new TypeError("Cannot read private member from an object whose class did not declare it");
|
||||
return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
|
||||
}
|
||||
export { __classPrivateFieldSet, __classPrivateFieldGet };
|
||||
135
extracted-source/node_modules/@anthropic-ai/sdk/internal/uploads.mjs
generated
vendored
135
extracted-source/node_modules/@anthropic-ai/sdk/internal/uploads.mjs
generated
vendored
@@ -1,135 +0,0 @@
|
||||
import { ReadableStreamFrom } from "./shims.mjs";
|
||||
export const checkFileSupport = () => {
|
||||
if (typeof File === 'undefined') {
|
||||
const { process } = globalThis;
|
||||
const isOldNode = typeof process?.versions?.node === 'string' && parseInt(process.versions.node.split('.')) < 20;
|
||||
throw new Error('`File` is not defined as a global, which is required for file uploads.' +
|
||||
(isOldNode ?
|
||||
" Update to Node 20 LTS or newer, or set `globalThis.File` to `import('node:buffer').File`."
|
||||
: ''));
|
||||
}
|
||||
};
|
||||
/**
|
||||
* Construct a `File` instance. This is used to ensure a helpful error is thrown
|
||||
* for environments that don't define a global `File` yet.
|
||||
*/
|
||||
export function makeFile(fileBits, fileName, options) {
|
||||
checkFileSupport();
|
||||
return new File(fileBits, fileName ?? 'unknown_file', options);
|
||||
}
|
||||
export function getName(value, stripPath) {
|
||||
const val = (typeof value === 'object' &&
|
||||
value !== null &&
|
||||
(('name' in value && value.name && String(value.name)) ||
|
||||
('url' in value && value.url && String(value.url)) ||
|
||||
('filename' in value && value.filename && String(value.filename)) ||
|
||||
('path' in value && value.path && String(value.path)))) ||
|
||||
'';
|
||||
return stripPath ? val.split(/[\\/]/).pop() || undefined : val;
|
||||
}
|
||||
export const isAsyncIterable = (value) => value != null && typeof value === 'object' && typeof value[Symbol.asyncIterator] === 'function';
|
||||
/**
|
||||
* Returns a multipart/form-data request if any part of the given request body contains a File / Blob value.
|
||||
* Otherwise returns the request as is.
|
||||
*/
|
||||
export const maybeMultipartFormRequestOptions = async (opts, fetch) => {
|
||||
if (!hasUploadableValue(opts.body))
|
||||
return opts;
|
||||
return { ...opts, body: await createForm(opts.body, fetch) };
|
||||
};
|
||||
export const multipartFormRequestOptions = async (opts, fetch, stripFilenames = true) => {
|
||||
return { ...opts, body: await createForm(opts.body, fetch, stripFilenames) };
|
||||
};
|
||||
const supportsFormDataMap = /* @__PURE__ */ new WeakMap();
|
||||
/**
|
||||
* node-fetch doesn't support the global FormData object in recent node versions. Instead of sending
|
||||
* properly-encoded form data, it just stringifies the object, resulting in a request body of "[object FormData]".
|
||||
* This function detects if the fetch function provided supports the global FormData object to avoid
|
||||
* confusing error messages later on.
|
||||
*/
|
||||
function supportsFormData(fetchObject) {
|
||||
const fetch = typeof fetchObject === 'function' ? fetchObject : fetchObject.fetch;
|
||||
const cached = supportsFormDataMap.get(fetch);
|
||||
if (cached)
|
||||
return cached;
|
||||
const promise = (async () => {
|
||||
try {
|
||||
const FetchResponse = ('Response' in fetch ?
|
||||
fetch.Response
|
||||
: (await fetch('data:,')).constructor);
|
||||
const data = new FormData();
|
||||
if (data.toString() === (await new FetchResponse(data).text())) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
catch {
|
||||
// avoid false negatives
|
||||
return true;
|
||||
}
|
||||
})();
|
||||
supportsFormDataMap.set(fetch, promise);
|
||||
return promise;
|
||||
}
|
||||
export const createForm = async (body, fetch, stripFilenames = true) => {
|
||||
if (!(await supportsFormData(fetch))) {
|
||||
throw new TypeError('The provided fetch function does not support file uploads with the current global FormData class.');
|
||||
}
|
||||
const form = new FormData();
|
||||
await Promise.all(Object.entries(body || {}).map(([key, value]) => addFormValue(form, key, value, stripFilenames)));
|
||||
return form;
|
||||
};
|
||||
// We check for Blob not File because Bun.File doesn't inherit from File,
|
||||
// but they both inherit from Blob and have a `name` property at runtime.
|
||||
const isNamedBlob = (value) => value instanceof Blob && 'name' in value;
|
||||
const isUploadable = (value) => typeof value === 'object' &&
|
||||
value !== null &&
|
||||
(value instanceof Response || isAsyncIterable(value) || isNamedBlob(value));
|
||||
const hasUploadableValue = (value) => {
|
||||
if (isUploadable(value))
|
||||
return true;
|
||||
if (Array.isArray(value))
|
||||
return value.some(hasUploadableValue);
|
||||
if (value && typeof value === 'object') {
|
||||
for (const k in value) {
|
||||
if (hasUploadableValue(value[k]))
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
const addFormValue = async (form, key, value, stripFilenames) => {
|
||||
if (value === undefined)
|
||||
return;
|
||||
if (value == null) {
|
||||
throw new TypeError(`Received null for "${key}"; to pass null in FormData, you must use the string 'null'`);
|
||||
}
|
||||
// TODO: make nested formats configurable
|
||||
if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') {
|
||||
form.append(key, String(value));
|
||||
}
|
||||
else if (value instanceof Response) {
|
||||
let options = {};
|
||||
const contentType = value.headers.get('Content-Type');
|
||||
if (contentType) {
|
||||
options = { type: contentType };
|
||||
}
|
||||
form.append(key, makeFile([await value.blob()], getName(value, stripFilenames), options));
|
||||
}
|
||||
else if (isAsyncIterable(value)) {
|
||||
form.append(key, makeFile([await new Response(ReadableStreamFrom(value)).blob()], getName(value, stripFilenames)));
|
||||
}
|
||||
else if (isNamedBlob(value)) {
|
||||
form.append(key, makeFile([value], getName(value, stripFilenames), { type: value.type }));
|
||||
}
|
||||
else if (Array.isArray(value)) {
|
||||
await Promise.all(value.map((entry) => addFormValue(form, key + '[]', entry, stripFilenames)));
|
||||
}
|
||||
else if (typeof value === 'object') {
|
||||
await Promise.all(Object.entries(value).map(([name, prop]) => addFormValue(form, `${key}[${name}]`, prop, stripFilenames)));
|
||||
}
|
||||
else {
|
||||
throw new TypeError(`Invalid value given to form, expected a string, number, boolean, object, Array, File or Blob but got ${value} instead`);
|
||||
}
|
||||
};
|
||||
//# sourceMappingURL=uploads.mjs.map
|
||||
26
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/bytes.mjs
generated
vendored
26
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/bytes.mjs
generated
vendored
@@ -1,26 +0,0 @@
|
||||
export function concatBytes(buffers) {
|
||||
let length = 0;
|
||||
for (const buffer of buffers) {
|
||||
length += buffer.length;
|
||||
}
|
||||
const output = new Uint8Array(length);
|
||||
let index = 0;
|
||||
for (const buffer of buffers) {
|
||||
output.set(buffer, index);
|
||||
index += buffer.length;
|
||||
}
|
||||
return output;
|
||||
}
|
||||
let encodeUTF8_;
|
||||
export function encodeUTF8(str) {
|
||||
let encoder;
|
||||
return (encodeUTF8_ ??
|
||||
((encoder = new globalThis.TextEncoder()), (encodeUTF8_ = encoder.encode.bind(encoder))))(str);
|
||||
}
|
||||
let decodeUTF8_;
|
||||
export function decodeUTF8(bytes) {
|
||||
let decoder;
|
||||
return (decodeUTF8_ ??
|
||||
((decoder = new globalThis.TextDecoder()), (decodeUTF8_ = decoder.decode.bind(decoder))))(bytes);
|
||||
}
|
||||
//# sourceMappingURL=bytes.mjs.map
|
||||
18
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/env.mjs
generated
vendored
18
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/env.mjs
generated
vendored
@@ -1,18 +0,0 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
/**
|
||||
* Read an environment variable.
|
||||
*
|
||||
* Trims beginning and trailing whitespace.
|
||||
*
|
||||
* Will return undefined if the environment variable doesn't exist or cannot be accessed.
|
||||
*/
|
||||
export const readEnv = (env) => {
|
||||
if (typeof globalThis.process !== 'undefined') {
|
||||
return globalThis.process.env?.[env]?.trim() ?? undefined;
|
||||
}
|
||||
if (typeof globalThis.Deno !== 'undefined') {
|
||||
return globalThis.Deno.env?.get?.(env)?.trim();
|
||||
}
|
||||
return undefined;
|
||||
};
|
||||
//# sourceMappingURL=env.mjs.map
|
||||
80
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/log.mjs
generated
vendored
80
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/log.mjs
generated
vendored
@@ -1,80 +0,0 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { hasOwn } from "./values.mjs";
|
||||
const levelNumbers = {
|
||||
off: 0,
|
||||
error: 200,
|
||||
warn: 300,
|
||||
info: 400,
|
||||
debug: 500,
|
||||
};
|
||||
export const parseLogLevel = (maybeLevel, sourceName, client) => {
|
||||
if (!maybeLevel) {
|
||||
return undefined;
|
||||
}
|
||||
if (hasOwn(levelNumbers, maybeLevel)) {
|
||||
return maybeLevel;
|
||||
}
|
||||
loggerFor(client).warn(`${sourceName} was set to ${JSON.stringify(maybeLevel)}, expected one of ${JSON.stringify(Object.keys(levelNumbers))}`);
|
||||
return undefined;
|
||||
};
|
||||
function noop() { }
|
||||
function makeLogFn(fnLevel, logger, logLevel) {
|
||||
if (!logger || levelNumbers[fnLevel] > levelNumbers[logLevel]) {
|
||||
return noop;
|
||||
}
|
||||
else {
|
||||
// Don't wrap logger functions, we want the stacktrace intact!
|
||||
return logger[fnLevel].bind(logger);
|
||||
}
|
||||
}
|
||||
const noopLogger = {
|
||||
error: noop,
|
||||
warn: noop,
|
||||
info: noop,
|
||||
debug: noop,
|
||||
};
|
||||
let cachedLoggers = /* @__PURE__ */ new WeakMap();
|
||||
export function loggerFor(client) {
|
||||
const logger = client.logger;
|
||||
const logLevel = client.logLevel ?? 'off';
|
||||
if (!logger) {
|
||||
return noopLogger;
|
||||
}
|
||||
const cachedLogger = cachedLoggers.get(logger);
|
||||
if (cachedLogger && cachedLogger[0] === logLevel) {
|
||||
return cachedLogger[1];
|
||||
}
|
||||
const levelLogger = {
|
||||
error: makeLogFn('error', logger, logLevel),
|
||||
warn: makeLogFn('warn', logger, logLevel),
|
||||
info: makeLogFn('info', logger, logLevel),
|
||||
debug: makeLogFn('debug', logger, logLevel),
|
||||
};
|
||||
cachedLoggers.set(logger, [logLevel, levelLogger]);
|
||||
return levelLogger;
|
||||
}
|
||||
export const formatRequestDetails = (details) => {
|
||||
if (details.options) {
|
||||
details.options = { ...details.options };
|
||||
delete details.options['headers']; // redundant + leaks internals
|
||||
}
|
||||
if (details.headers) {
|
||||
details.headers = Object.fromEntries((details.headers instanceof Headers ? [...details.headers] : Object.entries(details.headers)).map(([name, value]) => [
|
||||
name,
|
||||
(name.toLowerCase() === 'x-api-key' ||
|
||||
name.toLowerCase() === 'authorization' ||
|
||||
name.toLowerCase() === 'cookie' ||
|
||||
name.toLowerCase() === 'set-cookie') ?
|
||||
'***'
|
||||
: value,
|
||||
]));
|
||||
}
|
||||
if ('retryOfRequestLogID' in details) {
|
||||
if (details.retryOfRequestLogID) {
|
||||
details.retryOf = details.retryOfRequestLogID;
|
||||
}
|
||||
delete details.retryOfRequestLogID;
|
||||
}
|
||||
return details;
|
||||
};
|
||||
//# sourceMappingURL=log.mjs.map
|
||||
74
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/path.mjs
generated
vendored
74
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/path.mjs
generated
vendored
@@ -1,74 +0,0 @@
|
||||
import { AnthropicError } from "../../core/error.mjs";
|
||||
/**
|
||||
* Percent-encode everything that isn't safe to have in a path without encoding safe chars.
|
||||
*
|
||||
* Taken from https://datatracker.ietf.org/doc/html/rfc3986#section-3.3:
|
||||
* > unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
|
||||
* > sub-delims = "!" / "$" / "&" / "'" / "(" / ")" / "*" / "+" / "," / ";" / "="
|
||||
* > pchar = unreserved / pct-encoded / sub-delims / ":" / "@"
|
||||
*/
|
||||
export function encodeURIPath(str) {
|
||||
return str.replace(/[^A-Za-z0-9\-._~!$&'()*+,;=:@]+/g, encodeURIComponent);
|
||||
}
|
||||
const EMPTY = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.create(null));
|
||||
export const createPathTagFunction = (pathEncoder = encodeURIPath) => function path(statics, ...params) {
|
||||
// If there are no params, no processing is needed.
|
||||
if (statics.length === 1)
|
||||
return statics[0];
|
||||
let postPath = false;
|
||||
const invalidSegments = [];
|
||||
const path = statics.reduce((previousValue, currentValue, index) => {
|
||||
if (/[?#]/.test(currentValue)) {
|
||||
postPath = true;
|
||||
}
|
||||
const value = params[index];
|
||||
let encoded = (postPath ? encodeURIComponent : pathEncoder)('' + value);
|
||||
if (index !== params.length &&
|
||||
(value == null ||
|
||||
(typeof value === 'object' &&
|
||||
// handle values from other realms
|
||||
value.toString ===
|
||||
Object.getPrototypeOf(Object.getPrototypeOf(value.hasOwnProperty ?? EMPTY) ?? EMPTY)
|
||||
?.toString))) {
|
||||
encoded = value + '';
|
||||
invalidSegments.push({
|
||||
start: previousValue.length + currentValue.length,
|
||||
length: encoded.length,
|
||||
error: `Value of type ${Object.prototype.toString
|
||||
.call(value)
|
||||
.slice(8, -1)} is not a valid path parameter`,
|
||||
});
|
||||
}
|
||||
return previousValue + currentValue + (index === params.length ? '' : encoded);
|
||||
}, '');
|
||||
const pathOnly = path.split(/[?#]/, 1)[0];
|
||||
const invalidSegmentPattern = /(?<=^|\/)(?:\.|%2e){1,2}(?=\/|$)/gi;
|
||||
let match;
|
||||
// Find all invalid segments
|
||||
while ((match = invalidSegmentPattern.exec(pathOnly)) !== null) {
|
||||
invalidSegments.push({
|
||||
start: match.index,
|
||||
length: match[0].length,
|
||||
error: `Value "${match[0]}" can\'t be safely passed as a path parameter`,
|
||||
});
|
||||
}
|
||||
invalidSegments.sort((a, b) => a.start - b.start);
|
||||
if (invalidSegments.length > 0) {
|
||||
let lastEnd = 0;
|
||||
const underline = invalidSegments.reduce((acc, segment) => {
|
||||
const spaces = ' '.repeat(segment.start - lastEnd);
|
||||
const arrows = '^'.repeat(segment.length);
|
||||
lastEnd = segment.start + segment.length;
|
||||
return acc + spaces + arrows;
|
||||
}, '');
|
||||
throw new AnthropicError(`Path parameters result in path with invalid segments:\n${invalidSegments
|
||||
.map((e) => e.error)
|
||||
.join('\n')}\n${path}\n${underline}`);
|
||||
}
|
||||
return path;
|
||||
};
|
||||
/**
|
||||
* URI-encodes path params and ensures no unsafe /./ or /../ path segments are introduced.
|
||||
*/
|
||||
export const path = /* @__PURE__ */ createPathTagFunction(encodeURIPath);
|
||||
//# sourceMappingURL=path.mjs.map
|
||||
3
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/sleep.mjs
generated
vendored
3
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/sleep.mjs
generated
vendored
@@ -1,3 +0,0 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
export const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
|
||||
//# sourceMappingURL=sleep.mjs.map
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user