mirror of
https://github.com/tvytlx/ai-agent-deep-dive.git
synced 2026-04-05 00:24:50 +08:00
Add extracted source directory and README navigation
This commit is contained in:
163
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/AWS_restJson1.mjs
generated
vendored
Normal file
163
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/AWS_restJson1.mjs
generated
vendored
Normal file
@@ -0,0 +1,163 @@
|
||||
// Copied from https://github.com/aws/aws-sdk-js-v3/blob/bee66fbd2a519a16b57c787b2689af857af720af/clients/client-bedrock-runtime/src/protocols/Aws_restJson1.ts
|
||||
// Modified to remove unnecessary code (we only need to call `de_ResponseStream`) and to adjust imports.
|
||||
import { collectBody, decorateServiceException as __decorateServiceException, expectInt32 as __expectInt32, expectString as __expectString, map, take, } from '@smithy/smithy-client';
|
||||
import { InternalServerException, ModelStreamErrorException, ThrottlingException, ValidationException, } from '@aws-sdk/client-bedrock-runtime';
|
||||
/**
|
||||
* deserializeAws_restJson1InternalServerExceptionRes
|
||||
*/
|
||||
const de_InternalServerExceptionRes = async (parsedOutput, context) => {
|
||||
const contents = map({});
|
||||
const data = parsedOutput.body;
|
||||
const doc = take(data, {
|
||||
message: __expectString,
|
||||
});
|
||||
Object.assign(contents, doc);
|
||||
const exception = new InternalServerException({
|
||||
$metadata: deserializeMetadata(parsedOutput),
|
||||
...contents,
|
||||
});
|
||||
return __decorateServiceException(exception, parsedOutput.body);
|
||||
};
|
||||
/**
|
||||
* deserializeAws_restJson1ModelStreamErrorExceptionRes
|
||||
*/
|
||||
/**
 * deserializeAws_restJson1ModelStreamErrorExceptionRes
 *
 * Builds a `ModelStreamErrorException`, carrying through the original
 * message/status fields Bedrock reports for in-stream model errors.
 */
const de_ModelStreamErrorExceptionRes = async (parsedOutput, context) => {
    const body = parsedOutput.body;
    const contents = map({});
    Object.assign(contents, take(body, {
        message: __expectString,
        originalMessage: __expectString,
        originalStatusCode: __expectInt32,
    }));
    const exception = new ModelStreamErrorException({
        $metadata: deserializeMetadata(parsedOutput),
        ...contents,
    });
    return __decorateServiceException(exception, parsedOutput.body);
};
|
||||
/**
|
||||
* deserializeAws_restJson1ThrottlingExceptionRes
|
||||
*/
|
||||
/**
 * deserializeAws_restJson1ThrottlingExceptionRes
 *
 * Builds a `ThrottlingException` from an already-parsed error response body.
 */
const de_ThrottlingExceptionRes = async (parsedOutput, context) => {
    const body = parsedOutput.body;
    const contents = map({});
    Object.assign(contents, take(body, {
        message: __expectString,
    }));
    const exception = new ThrottlingException({
        $metadata: deserializeMetadata(parsedOutput),
        ...contents,
    });
    return __decorateServiceException(exception, parsedOutput.body);
};
|
||||
/**
|
||||
* deserializeAws_restJson1ValidationExceptionRes
|
||||
*/
|
||||
/**
 * deserializeAws_restJson1ValidationExceptionRes
 *
 * Builds a `ValidationException` from an already-parsed error response body.
 */
const de_ValidationExceptionRes = async (parsedOutput, context) => {
    const body = parsedOutput.body;
    const contents = map({});
    Object.assign(contents, take(body, {
        message: __expectString,
    }));
    const exception = new ValidationException({
        $metadata: deserializeMetadata(parsedOutput),
        ...contents,
    });
    return __decorateServiceException(exception, parsedOutput.body);
};
|
||||
/**
|
||||
* deserializeAws_restJson1ResponseStream
|
||||
*/
|
||||
/**
 * deserializeAws_restJson1ResponseStream
 *
 * Wires the event-stream marshaller up to the per-event-type deserializers.
 * Events are dispatched on the first recognized key (in the order below);
 * anything unrecognized is surfaced as `$unknown`.
 */
export const de_ResponseStream = (output, context) => {
    const handlers = {
        chunk: de_PayloadPart_event,
        internalServerException: de_InternalServerException_event,
        modelStreamErrorException: de_ModelStreamErrorException_event,
        validationException: de_ValidationException_event,
        throttlingException: de_ThrottlingException_event,
    };
    return context.eventStreamMarshaller.deserialize(output, async (event) => {
        for (const name of Object.keys(handlers)) {
            if (event[name] != null) {
                return { [name]: await handlers[name](event[name], context) };
            }
        }
        return { $unknown: output };
    });
};
|
||||
// Each *_event helper JSON-parses the raw event body, then delegates to the
// matching *Res deserializer defined above.
const de_InternalServerException_event = async (output, context) => {
    const parsedOutput = { ...output, body: await parseBody(output.body, context) };
    return de_InternalServerExceptionRes(parsedOutput, context);
};
const de_ModelStreamErrorException_event = async (output, context) => {
    const parsedOutput = { ...output, body: await parseBody(output.body, context) };
    return de_ModelStreamErrorExceptionRes(parsedOutput, context);
};
// Payload parts carry model output bytes rather than an exception shape.
const de_PayloadPart_event = async (output, context) => {
    const data = await parseBody(output.body, context);
    return { ...de_PayloadPart(data, context) };
};
const de_ThrottlingException_event = async (output, context) => {
    const parsedOutput = { ...output, body: await parseBody(output.body, context) };
    return de_ThrottlingExceptionRes(parsedOutput, context);
};
const de_ValidationException_event = async (output, context) => {
    const parsedOutput = { ...output, body: await parseBody(output.body, context) };
    return de_ValidationExceptionRes(parsedOutput, context);
};
|
||||
/**
|
||||
* deserializeAws_restJson1PayloadPart
|
||||
*/
|
||||
/**
 * deserializeAws_restJson1PayloadPart
 *
 * The `bytes` field arrives base64-encoded; decode it with the context's decoder.
 */
const de_PayloadPart = (output, context) => take(output, { bytes: context.base64Decoder });
|
||||
// Lift HTTP-level metadata (status code plus the various AWS request-id
// headers, first match wins) off the raw response.
const deserializeMetadata = (output) => {
    const h = output.headers;
    return {
        httpStatusCode: output.statusCode,
        requestId: h['x-amzn-requestid'] ?? h['x-amzn-request-id'] ?? h['x-amz-request-id'] ?? '',
        extendedRequestId: h['x-amz-id-2'] ?? '',
        cfId: h['x-amz-cf-id'] ?? '',
    };
};
|
||||
// Collect a streaming body into a single utf-8 string.
const collectBodyString = (streamBody, context) => collectBody(streamBody, context).then((body) => context.utf8Encoder(body));
// Parse a streaming body as JSON; an empty body parses to an empty object.
const parseBody = async (streamBody, context) => {
    const encoded = await collectBodyString(streamBody, context);
    return encoded.length ? JSON.parse(encoded) : {};
};
|
||||
//# sourceMappingURL=AWS_restJson1.mjs.map
|
||||
124
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/client.mjs
generated
vendored
Normal file
124
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/client.mjs
generated
vendored
Normal file
@@ -0,0 +1,124 @@
|
||||
import { BaseAnthropic } from '@anthropic-ai/sdk/client';
|
||||
import * as Resources from '@anthropic-ai/sdk/resources/index';
|
||||
import { getAuthHeaders } from "./core/auth.mjs";
|
||||
import { Stream } from "./core/streaming.mjs";
|
||||
import { readEnv } from "./internal/utils/env.mjs";
|
||||
import { isObj } from "./internal/utils/values.mjs";
|
||||
import { buildHeaders } from "./internal/headers.mjs";
|
||||
import { path } from "./internal/utils/path.mjs";
|
||||
export { BaseAnthropic } from '@anthropic-ai/sdk/client';
|
||||
const DEFAULT_VERSION = 'bedrock-2023-05-31';
|
||||
const MODEL_ENDPOINTS = new Set(['/v1/complete', '/v1/messages', '/v1/messages?beta=true']);
|
||||
/** API Client for interfacing with the Anthropic Bedrock API. */
export class AnthropicBedrock extends BaseAnthropic {
    /**
     * API Client for interfacing with the Anthropic Bedrock API.
     *
     * @param {string | null | undefined} [opts.awsSecretKey]
     * @param {string | null | undefined} [opts.awsAccessKey]
     * @param {string | undefined} [opts.awsRegion=process.env['AWS_REGION'] ?? us-east-1]
     * @param {string | null | undefined} [opts.awsSessionToken]
     * @param {(() => Promise<AwsCredentialIdentityProvider>) | null} [opts.providerChainResolver] - Custom provider chain resolver for AWS credentials. Useful for non-Node environments.
     * @param {string} [opts.baseURL=process.env['ANTHROPIC_BEDROCK_BASE_URL'] ?? https://bedrock-runtime.${this.awsRegion}.amazonaws.com] - Override the default base URL for the API.
     * @param {number} [opts.timeout=10 minutes] - The maximum amount of time (in milliseconds) the client will wait for a response before timing out.
     * @param {MergedRequestInit} [opts.fetchOptions] - Additional `RequestInit` options to be passed to `fetch` calls.
     * @param {Fetch} [opts.fetch] - Specify a custom `fetch` function implementation.
     * @param {number} [opts.maxRetries=2] - The maximum number of times the client will retry a request.
     * @param {HeadersLike} opts.defaultHeaders - Default headers to include with every request to the API.
     * @param {Record<string, string | undefined>} opts.defaultQuery - Default query parameters to include with every request to the API.
     * @param {boolean} [opts.dangerouslyAllowBrowser=false] - By default, client-side use of this library is not allowed, as it risks exposing your secret API credentials to attackers.
     * @param {boolean} [opts.skipAuth=false] - Skip authentication for this request. This is useful if you have an internal proxy that handles authentication for you.
     */
    constructor({ awsRegion = readEnv('AWS_REGION') ?? 'us-east-1', baseURL = readEnv('ANTHROPIC_BEDROCK_BASE_URL') ?? `https://bedrock-runtime.${awsRegion}.amazonaws.com`, awsSecretKey = null, awsAccessKey = null, awsSessionToken = null, providerChainResolver = null, ...opts } = {}) {
        super({
            baseURL,
            ...opts,
        });
        // Placeholder default; the effective value is re-read from opts below.
        this.skipAuth = false;
        // Bedrock exposes a trimmed-down resource surface (no batches / countTokens).
        this.messages = makeMessagesResource(this);
        this.completions = new Resources.Completions(this);
        this.beta = makeBetaResource(this);
        this.awsSecretKey = awsSecretKey;
        this.awsAccessKey = awsAccessKey;
        this.awsRegion = awsRegion;
        this.awsSessionToken = awsSessionToken;
        this.skipAuth = opts.skipAuth ?? false;
        this.providerChainResolver = providerChainResolver;
    }
    validateHeaders() {
        // auth validation is handled in prepareRequest since it needs to be async
    }
    // Sign the outgoing request with AWS SigV4 headers unless `skipAuth` is set.
    async prepareRequest(request, { url, options }) {
        if (this.skipAuth) {
            return;
        }
        const regionName = this.awsRegion;
        if (!regionName) {
            throw new Error('Expected `awsRegion` option to be passed to the client or the `AWS_REGION` environment variable to be present');
        }
        const headers = await getAuthHeaders(request, {
            url,
            regionName,
            awsAccessKey: this.awsAccessKey,
            awsSecretKey: this.awsSecretKey,
            awsSessionToken: this.awsSessionToken,
            fetchOptions: this.fetchOptions,
            providerChainResolver: this.providerChainResolver,
        });
        // Merge the signed headers with the request's existing headers.
        request.headers = buildHeaders([headers, request.headers]).values;
    }
    // Rewrite Anthropic-style message/complete requests into Bedrock
    // `invoke` / `invoke-with-response-stream` calls.
    async buildRequest(options) {
        options.__streamClass = Stream;
        if (isObj(options.body)) {
            // create a shallow copy of the request body so that code that mutates it later
            // doesn't mutate the original user-provided object
            options.body = { ...options.body };
        }
        if (isObj(options.body)) {
            if (!options.body['anthropic_version']) {
                options.body['anthropic_version'] = DEFAULT_VERSION;
            }
            // Bedrock takes beta flags in the body rather than in a header.
            if (options.headers && !options.body['anthropic_beta']) {
                const betas = buildHeaders([options.headers]).values.get('anthropic-beta');
                if (betas != null) {
                    options.body['anthropic_beta'] = betas.split(',');
                }
            }
        }
        if (MODEL_ENDPOINTS.has(options.path) && options.method === 'post') {
            if (!isObj(options.body)) {
                throw new Error('Expected request body to be an object for post /v1/messages');
            }
            // `model` and `stream` move out of the body and into the URL path.
            const model = options.body['model'];
            options.body['model'] = undefined;
            const stream = options.body['stream'];
            options.body['stream'] = undefined;
            if (stream) {
                options.path = path `/model/${model}/invoke-with-response-stream`;
            }
            else {
                options.path = path `/model/${model}/invoke`;
            }
        }
        return super.buildRequest(options);
    }
}
|
||||
// Bedrock does not support the Batches or countTokens APIs, so strip them
// from the standard Messages resource before handing it to the client.
function makeMessagesResource(client) {
    const messages = new Resources.Messages(client);
    for (const unsupported of ['batches', 'countTokens']) {
        // @ts-expect-error we're deleting non-optional properties
        delete messages[unsupported];
    }
    return messages;
}
|
||||
// Bedrock does not support prompt caching, message batches, or countTokens on
// the beta surface, so strip those from the standard Beta resource.
function makeBetaResource(client) {
    const beta = new Resources.Beta(client);
    // @ts-expect-error we're deleting non-optional properties
    delete beta.promptCaching;
    for (const unsupported of ['batches', 'countTokens']) {
        // @ts-expect-error we're deleting non-optional properties
        delete beta.messages[unsupported];
    }
    return beta;
}
|
||||
//# sourceMappingURL=client.mjs.map
|
||||
82
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/core/auth.mjs
generated
vendored
Normal file
82
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/core/auth.mjs
generated
vendored
Normal file
@@ -0,0 +1,82 @@
|
||||
import { Sha256 } from '@aws-crypto/sha256-js';
|
||||
import { FetchHttpHandler } from '@smithy/fetch-http-handler';
|
||||
import { HttpRequest } from '@smithy/protocol-http';
|
||||
import { SignatureV4 } from '@smithy/signature-v4';
|
||||
import assert from 'assert';
|
||||
// Lazily load the AWS default credential provider chain. Kept behind a dynamic
// import so runtimes without '@aws-sdk/credential-providers' can supply their
// own `providerChainResolver` instead of failing at module load time.
const DEFAULT_PROVIDER_CHAIN_RESOLVER = async () => {
    try {
        const { fromNodeProviderChain } = await import('@aws-sdk/credential-providers');
        return fromNodeProviderChain({
            clientConfig: {
                requestHandler: new FetchHttpHandler({
                    requestInit: (httpRequest) => ({ ...httpRequest }),
                }),
            },
        });
    }
    catch (error) {
        throw new Error(`Failed to import '@aws-sdk/credential-providers'.` +
            `You can provide a custom \`providerChainResolver\` in the client options if your runtime does not have access to '@aws-sdk/credential-providers': ` +
            `\`new AnthropicBedrock({ providerChainResolver })\` ` +
            `Original error: ${error.message}`);
    }
};
|
||||
/**
 * Produce AWS SigV4-signed headers for an outgoing Bedrock request.
 *
 * Resolves credentials (via `props.providerChainResolver` when given,
 * otherwise the default Node provider chain), signs the request for the
 * `bedrock` service in `props.regionName`, and returns the signed header set.
 *
 * @param req - fetch-style request; `method`, `headers`, and `body` are read.
 * @param props - url / regionName / explicit AWS credentials / providerChainResolver.
 * @returns the full header object produced by the SigV4 signer.
 */
export const getAuthHeaders = async (req, props) => {
    assert(req.method, 'Expected request method property to be set');
    const providerChain = await (props.providerChainResolver ?
        props.providerChainResolver()
        : DEFAULT_PROVIDER_CHAIN_RESOLVER());
    const credentials = await withTempEnv(() => {
        // Temporarily set the appropriate environment variables if we've been
        // explicitly given credentials so that the credentials provider can
        // resolve them.
        //
        // Note: the environment provider is only not run first if the `AWS_PROFILE`
        // environment variable is set.
        // https://github.com/aws/aws-sdk-js-v3/blob/44a18a34b2c93feccdfcd162928d13e6dbdcaf30/packages/credential-provider-node/src/defaultProvider.ts#L49
        if (props.awsAccessKey) {
            process.env['AWS_ACCESS_KEY_ID'] = props.awsAccessKey;
        }
        if (props.awsSecretKey) {
            process.env['AWS_SECRET_ACCESS_KEY'] = props.awsSecretKey;
        }
        if (props.awsSessionToken) {
            process.env['AWS_SESSION_TOKEN'] = props.awsSessionToken;
        }
    }, () => providerChain());
    const signer = new SignatureV4({
        service: 'bedrock',
        region: props.regionName,
        credentials,
        sha256: Sha256,
    });
    const url = new URL(props.url);
    // Normalize req.headers (a Headers instance / entry iterable, a plain
    // object, or absent) into a plain object the signer can consume.
    const headers = !req.headers ? {}
        : Symbol.iterator in req.headers ?
            Object.fromEntries(Array.from(req.headers).map((header) => [...header]))
            : { ...req.headers };
    // The connection header may be stripped by a proxy somewhere, so the receiver
    // of this message may not see this header, so we remove it from the set of headers
    // that are signed.
    delete headers['connection'];
    headers['host'] = url.hostname;
    const request = new HttpRequest({
        method: req.method.toUpperCase(),
        protocol: url.protocol,
        path: url.pathname,
        headers,
        body: req.body,
    });
    const signed = await signer.sign(request);
    return signed.headers;
};
|
||||
// Run `fn` with whatever environment mutations `updateEnv` applies, then
// restore the prior `process.env` snapshot — even if either callback throws.
// NOTE(review): this swaps the global env object, so concurrent callers that
// read process.env mid-flight could observe the temporary values.
const withTempEnv = async (updateEnv, fn) => {
    const snapshot = { ...process.env };
    try {
        updateEnv();
        return await fn();
    }
    finally {
        process.env = snapshot;
    }
};
|
||||
//# sourceMappingURL=auth.mjs.map
|
||||
2
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/core/error.mjs
generated
vendored
Normal file
2
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/core/error.mjs
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
export * from '@anthropic-ai/sdk/core/error';
|
||||
//# sourceMappingURL=error.mjs.map
|
||||
108
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/core/streaming.mjs
generated
vendored
Normal file
108
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/core/streaming.mjs
generated
vendored
Normal file
@@ -0,0 +1,108 @@
|
||||
import { EventStreamMarshaller } from '@smithy/eventstream-serde-node';
|
||||
import { fromBase64, toBase64 } from '@smithy/util-base64';
|
||||
import { streamCollector } from '@smithy/fetch-http-handler';
|
||||
import { Stream as CoreStream } from '@anthropic-ai/sdk/streaming';
|
||||
import { AnthropicError } from '@anthropic-ai/sdk/error';
|
||||
import { APIError } from '@anthropic-ai/sdk';
|
||||
import { de_ResponseStream } from "../AWS_restJson1.mjs";
|
||||
import { ReadableStreamToAsyncIterable } from "../internal/shims.mjs";
|
||||
import { safeJSON } from "../internal/utils/values.mjs";
|
||||
import { loggerFor } from "../internal/utils/log.mjs";
|
||||
// Decode a Uint8Array of UTF-8 bytes into a string.
export const toUtf8 = (input) => {
    return new TextDecoder('utf-8').decode(input);
};
// Encode a string into its UTF-8 bytes.
export const fromUtf8 = (input) => {
    return new TextEncoder().encode(input);
};
|
||||
// `de_ResponseStream` expects a full Smithy serde "context" with many fields;
// this builds only the minimal subset it actually touches.
export const getMinimalSerdeContext = () => ({
    base64Decoder: fromBase64,
    base64Encoder: toBase64,
    utf8Decoder: fromUtf8,
    utf8Encoder: toUtf8,
    eventStreamMarshaller: new EventStreamMarshaller({ utf8Encoder: toUtf8, utf8Decoder: fromUtf8 }),
    streamCollector: streamCollector,
});
|
||||
export class Stream extends CoreStream {
    /**
     * Build a Stream of parsed events from a Bedrock streaming HTTP response.
     * The name is kept for interface compatibility with the core SDK, but the
     * body here is AWS event-stream framed (decoded via `de_ResponseStream`)
     * rather than SSE.
     */
    static fromSSEResponse(response, controller, client) {
        let consumed = false;
        const logger = client ? loggerFor(client) : console;
        // Decode the AWS event stream into SSE-shaped { event, data, raw } records.
        async function* iterMessages() {
            if (!response.body) {
                controller.abort();
                throw new AnthropicError(`Attempted to iterate over a response with no body`);
            }
            const responseBodyIter = ReadableStreamToAsyncIterable(response.body);
            const eventStream = de_ResponseStream(responseBodyIter, getMinimalSerdeContext());
            for await (const event of eventStream) {
                if (event.chunk && event.chunk.bytes) {
                    const s = toUtf8(event.chunk.bytes);
                    yield { event: 'chunk', data: s, raw: [] };
                }
                else if (event.internalServerException) {
                    yield { event: 'error', data: 'InternalServerException', raw: [] };
                }
                else if (event.modelStreamErrorException) {
                    yield { event: 'error', data: 'ModelStreamErrorException', raw: [] };
                }
                else if (event.validationException) {
                    yield { event: 'error', data: 'ValidationException', raw: [] };
                }
                else if (event.throttlingException) {
                    yield { event: 'error', data: 'ThrottlingException', raw: [] };
                }
            }
        }
        // Note: this function is copied entirely from the core SDK
        async function* iterator() {
            if (consumed) {
                throw new Error('Cannot iterate over a consumed stream, use `.tee()` to split the stream.');
            }
            consumed = true;
            let done = false;
            try {
                for await (const sse of iterMessages()) {
                    if (sse.event === 'chunk') {
                        try {
                            yield JSON.parse(sse.data);
                        }
                        catch (e) {
                            logger.error(`Could not parse message into JSON:`, sse.data);
                            logger.error(`From chunk:`, sse.raw);
                            throw e;
                        }
                    }
                    if (sse.event === 'error') {
                        const errText = sse.data;
                        const errJSON = safeJSON(errText);
                        const errMessage = errJSON ? undefined : errText;
                        throw APIError.generate(undefined, errJSON, errMessage, response.headers);
                    }
                }
                done = true;
            }
            catch (e) {
                // If the user calls `stream.controller.abort()`, we should exit without throwing.
                if (isAbortError(e))
                    return;
                throw e;
            }
            finally {
                // If the user `break`s, abort the ongoing request.
                if (!done)
                    controller.abort();
            }
        }
        return new Stream(iterator, controller);
    }
}
|
||||
// True when `err` represents a user-initiated fetch cancellation, covering
// both spec-compliant fetch implementations and Expo's fetch.
function isAbortError(err) {
    if (typeof err !== 'object' || err === null) {
        return false;
    }
    // Spec-compliant fetch implementations
    if ('name' in err && err.name === 'AbortError') {
        return true;
    }
    // Expo fetch
    return 'message' in err && String(err.message).includes('FetchRequestCanceledException');
}
|
||||
//# sourceMappingURL=streaming.mjs.map
|
||||
3
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/index.mjs
generated
vendored
Normal file
3
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/index.mjs
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
export * from "./client.mjs";
|
||||
export { AnthropicBedrock as default } from "./client.mjs";
|
||||
//# sourceMappingURL=index.mjs.map
|
||||
74
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/headers.mjs
generated
vendored
Normal file
74
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/headers.mjs
generated
vendored
Normal file
@@ -0,0 +1,74 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { isReadonlyArray } from "./utils/values.mjs";
|
||||
// Brand symbol marking records produced by `buildHeaders` below.
const brand_privateNullableHeaders = Symbol.for('brand.privateNullableHeaders');
/**
 * Yield [name, value] pairs from any supported header representation: a
 * branded nullable-headers record, a `Headers` instance, an array of entries,
 * or a plain object. For plain objects a [name, null] "clear" marker is
 * yielded before the first value of each name, so object keys overwrite
 * previously-merged headers instead of appending to them. A `null` value
 * always means "delete this header".
 */
function* iterateHeaders(headers) {
    if (!headers)
        return;
    if (brand_privateNullableHeaders in headers) {
        // Already-built record: replay its values, then its deletions.
        const { values, nulls } = headers;
        yield* values.entries();
        for (const name of nulls) {
            yield [name, null];
        }
        return;
    }
    let shouldClear = false;
    let iter;
    if (headers instanceof Headers) {
        iter = headers.entries();
    }
    else if (isReadonlyArray(headers)) {
        iter = headers;
    }
    else {
        shouldClear = true;
        iter = Object.entries(headers ?? {});
    }
    for (let row of iter) {
        const name = row[0];
        if (typeof name !== 'string')
            throw new TypeError('expected header name to be a string');
        // A value may itself be an array of values for the same header name.
        const values = isReadonlyArray(row[1]) ? row[1] : [row[1]];
        let didClear = false;
        for (const value of values) {
            if (value === undefined)
                continue;
            // Objects keys always overwrite older headers, they never append.
            // Yield a null to clear the header before adding the new values.
            if (shouldClear && !didClear) {
                didClear = true;
                yield [name, null];
            }
            yield [name, value];
        }
    }
}
|
||||
// Merge several header sources (Headers instances, entry arrays, plain
// objects, or previously-built nullable-header records) into one branded
// record. A `null` value deletes a header; later sources win over earlier
// ones; deletions are tracked in `nulls` so they survive further merges.
export const buildHeaders = (newHeaders) => {
    const values = new Headers();
    const nulls = new Set();
    for (const source of newHeaders) {
        const seen = new Set();
        for (const [name, value] of iterateHeaders(source)) {
            const key = name.toLowerCase();
            // First time this name shows up in this source: drop whatever an
            // earlier source contributed so this source fully replaces it.
            if (!seen.has(key)) {
                values.delete(name);
                seen.add(key);
            }
            if (value === null) {
                values.delete(name);
                nulls.add(key);
            }
            else {
                values.append(name, value);
                nulls.delete(key);
            }
        }
    }
    return { [brand_privateNullableHeaders]: true, values, nulls };
};
|
||||
// A header source is "empty" when iterating it yields nothing at all.
export const isEmptyHeaders = (headers) => {
    const first = iterateHeaders(headers).next();
    return first.done === true;
};
|
||||
//# sourceMappingURL=headers.mjs.map
|
||||
85
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/shims.mjs
generated
vendored
Normal file
85
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/shims.mjs
generated
vendored
Normal file
@@ -0,0 +1,85 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
// Resolve the global `fetch`, failing loudly on runtimes that lack one.
export function getDefaultFetch() {
    if (typeof fetch === 'undefined') {
        throw new Error('`fetch` is not defined as a global; Either pass `fetch` to the client, `new Anthropic({ fetch })` or polyfill the global, `globalThis.fetch = fetch`');
    }
    return fetch;
}
|
||||
// Construct a ReadableStream via the global constructor, with a clear error
// on runtimes that do not provide one.
export function makeReadableStream(...args) {
    const GlobalReadableStream = globalThis.ReadableStream;
    if (typeof GlobalReadableStream === 'undefined') {
        // Note: All of the platforms / runtimes we officially support already define
        // `ReadableStream` as a global, so this should only ever be hit on unsupported runtimes.
        throw new Error('`ReadableStream` is not defined as a global; You will need to polyfill it, `globalThis.ReadableStream = ReadableStream`');
    }
    return new GlobalReadableStream(...args);
}
|
||||
// Adapt any sync or async iterable into a pull-based ReadableStream.
export function ReadableStreamFrom(iterable) {
    const iter = Symbol.asyncIterator in iterable ? iterable[Symbol.asyncIterator]() : iterable[Symbol.iterator]();
    return makeReadableStream({
        start() { },
        async pull(controller) {
            const { done, value } = await iter.next();
            if (done) {
                controller.close();
                return;
            }
            controller.enqueue(value);
        },
        async cancel() {
            // Give the source iterator a chance to clean up.
            await iter.return?.();
        },
    });
}
|
||||
/**
 * Most browsers don't yet have async iterable support for ReadableStream,
 * and Node has a very different way of reading bytes from its "ReadableStream".
 *
 * This polyfill was pulled from https://github.com/MattiasBuelens/web-streams-polyfill/pull/122#issuecomment-1627354490
 */
export function ReadableStreamToAsyncIterable(stream) {
    // Streams that already support async iteration pass through untouched.
    if (stream[Symbol.asyncIterator]) {
        return stream;
    }
    const reader = stream.getReader();
    const next = async () => {
        try {
            const result = await reader.read();
            if (result?.done) {
                reader.releaseLock(); // release lock when stream becomes closed
            }
            return result;
        }
        catch (e) {
            reader.releaseLock(); // release lock when stream becomes errored
            throw e;
        }
    };
    const finish = async () => {
        const cancelPromise = reader.cancel();
        reader.releaseLock();
        await cancelPromise;
        return { done: true, value: undefined };
    };
    return {
        next,
        return: finish,
        [Symbol.asyncIterator]() {
            return this;
        },
    };
}
|
||||
/**
 * Cancels a ReadableStream we don't need to consume.
 * See https://undici.nodejs.org/#/?id=garbage-collection
 */
export async function CancelReadableStream(stream) {
    // Non-objects (null, undefined, primitives) have nothing to cancel.
    if (typeof stream !== 'object' || stream === null) {
        return;
    }
    // Prefer the async-iterator protocol when available: `return()` cancels it.
    const aiter = stream[Symbol.asyncIterator];
    if (aiter) {
        await aiter.call(stream).return?.();
        return;
    }
    // Fall back to the reader API: start the cancel, drop our lock, then wait.
    const reader = stream.getReader();
    const pending = reader.cancel();
    reader.releaseLock();
    await pending;
}
|
||||
//# sourceMappingURL=shims.mjs.map
|
||||
18
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/utils/env.mjs
generated
vendored
Normal file
18
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/utils/env.mjs
generated
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
/**
 * Read an environment variable.
 *
 * Trims beginning and trailing whitespace.
 *
 * Will return undefined if the environment variable doesn't exist or cannot be accessed.
 */
export const readEnv = (env) => {
    const proc = globalThis.process;
    if (typeof proc !== 'undefined') {
        return proc.env?.[env]?.trim() ?? undefined;
    }
    const deno = globalThis.Deno;
    if (typeof deno !== 'undefined') {
        return deno.env?.get?.(env)?.trim();
    }
    return undefined;
};
|
||||
//# sourceMappingURL=env.mjs.map
|
||||
80
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/utils/log.mjs
generated
vendored
Normal file
80
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/utils/log.mjs
generated
vendored
Normal file
@@ -0,0 +1,80 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { hasOwn } from "./values.mjs";
|
||||
// Numeric rank for each log level; a message is emitted when its level's rank
// is <= the configured level's rank (see makeLogFn). `off` (0) disables all.
const levelNumbers = {
    off: 0,
    error: 200,
    warn: 300,
    info: 400,
    debug: 500,
};
|
||||
// Validate a user-supplied log level. Returns the level when it is one of the
// known names, otherwise warns via the client's logger and returns undefined.
export const parseLogLevel = (maybeLevel, sourceName, client) => {
    if (!maybeLevel) {
        return undefined;
    }
    if (hasOwn(levelNumbers, maybeLevel)) {
        return maybeLevel;
    }
    const expected = JSON.stringify(Object.keys(levelNumbers));
    loggerFor(client).warn(`${sourceName} was set to ${JSON.stringify(maybeLevel)}, expected one of ${expected}`);
    return undefined;
};
|
||||
// Shared do-nothing sink for disabled log levels.
function noop() { }
// Return the logger's method for `fnLevel`, or `noop` when no logger is
// configured or `logLevel` doesn't reach that level.
function makeLogFn(fnLevel, logger, logLevel) {
    if (!logger || levelNumbers[fnLevel] > levelNumbers[logLevel]) {
        return noop;
    }
    // Don't wrap logger functions, we want the stacktrace intact!
    return logger[fnLevel].bind(logger);
}
// Logger whose every method is a no-op; handed out when logging is disabled.
const noopLogger = {
    error: noop,
    warn: noop,
    info: noop,
    debug: noop,
};
|
||||
// One leveled logger per underlying logger object, keyed weakly so we don't
// rebuild the bound log functions on every call.
let cachedLoggers = /* @__PURE__ */ new WeakMap();
// Return a level-filtered view of the client's logger (or a no-op logger when
// none is configured), caching the result per (logger, logLevel) pair.
export function loggerFor(client) {
    const logger = client.logger;
    const logLevel = client.logLevel ?? 'off';
    if (!logger) {
        return noopLogger;
    }
    const cached = cachedLoggers.get(logger);
    if (cached && cached[0] === logLevel) {
        return cached[1];
    }
    const levelLogger = {
        error: makeLogFn('error', logger, logLevel),
        warn: makeLogFn('warn', logger, logLevel),
        info: makeLogFn('info', logger, logLevel),
        debug: makeLogFn('debug', logger, logLevel),
    };
    cachedLoggers.set(logger, [logLevel, levelLogger]);
    return levelLogger;
}
|
||||
export const formatRequestDetails = (details) => {
|
||||
if (details.options) {
|
||||
details.options = { ...details.options };
|
||||
delete details.options['headers']; // redundant + leaks internals
|
||||
}
|
||||
if (details.headers) {
|
||||
details.headers = Object.fromEntries((details.headers instanceof Headers ? [...details.headers] : Object.entries(details.headers)).map(([name, value]) => [
|
||||
name,
|
||||
(name.toLowerCase() === 'x-api-key' ||
|
||||
name.toLowerCase() === 'authorization' ||
|
||||
name.toLowerCase() === 'cookie' ||
|
||||
name.toLowerCase() === 'set-cookie') ?
|
||||
'***'
|
||||
: value,
|
||||
]));
|
||||
}
|
||||
if ('retryOfRequestLogID' in details) {
|
||||
if (details.retryOfRequestLogID) {
|
||||
details.retryOf = details.retryOfRequestLogID;
|
||||
}
|
||||
delete details.retryOfRequestLogID;
|
||||
}
|
||||
return details;
|
||||
};
|
||||
//# sourceMappingURL=log.mjs.map
|
||||
74
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/utils/path.mjs
generated
vendored
Normal file
74
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/utils/path.mjs
generated
vendored
Normal file
@@ -0,0 +1,74 @@
|
||||
import { AnthropicError } from "../../core/error.mjs";
|
||||
/**
|
||||
* Percent-encode everything that isn't safe to have in a path without encoding safe chars.
|
||||
*
|
||||
* Taken from https://datatracker.ietf.org/doc/html/rfc3986#section-3.3:
|
||||
* > unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
|
||||
* > sub-delims = "!" / "$" / "&" / "'" / "(" / ")" / "*" / "+" / "," / ";" / "="
|
||||
* > pchar = unreserved / pct-encoded / sub-delims / ":" / "@"
|
||||
*/
|
||||
export function encodeURIPath(str) {
|
||||
return str.replace(/[^A-Za-z0-9\-._~!$&'()*+,;=:@]+/g, encodeURIComponent);
|
||||
}
|
||||
const EMPTY = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.create(null));
|
||||
export const createPathTagFunction = (pathEncoder = encodeURIPath) => function path(statics, ...params) {
|
||||
// If there are no params, no processing is needed.
|
||||
if (statics.length === 1)
|
||||
return statics[0];
|
||||
let postPath = false;
|
||||
const invalidSegments = [];
|
||||
const path = statics.reduce((previousValue, currentValue, index) => {
|
||||
if (/[?#]/.test(currentValue)) {
|
||||
postPath = true;
|
||||
}
|
||||
const value = params[index];
|
||||
let encoded = (postPath ? encodeURIComponent : pathEncoder)('' + value);
|
||||
if (index !== params.length &&
|
||||
(value == null ||
|
||||
(typeof value === 'object' &&
|
||||
// handle values from other realms
|
||||
value.toString ===
|
||||
Object.getPrototypeOf(Object.getPrototypeOf(value.hasOwnProperty ?? EMPTY) ?? EMPTY)
|
||||
?.toString))) {
|
||||
encoded = value + '';
|
||||
invalidSegments.push({
|
||||
start: previousValue.length + currentValue.length,
|
||||
length: encoded.length,
|
||||
error: `Value of type ${Object.prototype.toString
|
||||
.call(value)
|
||||
.slice(8, -1)} is not a valid path parameter`,
|
||||
});
|
||||
}
|
||||
return previousValue + currentValue + (index === params.length ? '' : encoded);
|
||||
}, '');
|
||||
const pathOnly = path.split(/[?#]/, 1)[0];
|
||||
const invalidSegmentPattern = /(?<=^|\/)(?:\.|%2e){1,2}(?=\/|$)/gi;
|
||||
let match;
|
||||
// Find all invalid segments
|
||||
while ((match = invalidSegmentPattern.exec(pathOnly)) !== null) {
|
||||
invalidSegments.push({
|
||||
start: match.index,
|
||||
length: match[0].length,
|
||||
error: `Value "${match[0]}" can\'t be safely passed as a path parameter`,
|
||||
});
|
||||
}
|
||||
invalidSegments.sort((a, b) => a.start - b.start);
|
||||
if (invalidSegments.length > 0) {
|
||||
let lastEnd = 0;
|
||||
const underline = invalidSegments.reduce((acc, segment) => {
|
||||
const spaces = ' '.repeat(segment.start - lastEnd);
|
||||
const arrows = '^'.repeat(segment.length);
|
||||
lastEnd = segment.start + segment.length;
|
||||
return acc + spaces + arrows;
|
||||
}, '');
|
||||
throw new AnthropicError(`Path parameters result in path with invalid segments:\n${invalidSegments
|
||||
.map((e) => e.error)
|
||||
.join('\n')}\n${path}\n${underline}`);
|
||||
}
|
||||
return path;
|
||||
};
|
||||
/**
|
||||
* URI-encodes path params and ensures no unsafe /./ or /../ path segments are introduced.
|
||||
*/
|
||||
export const path = /* @__PURE__ */ createPathTagFunction(encodeURIPath);
|
||||
//# sourceMappingURL=path.mjs.map
|
||||
94
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/utils/values.mjs
generated
vendored
Normal file
94
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/utils/values.mjs
generated
vendored
Normal file
@@ -0,0 +1,94 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { AnthropicError } from "../../core/error.mjs";
|
||||
// https://url.spec.whatwg.org/#url-scheme-string
|
||||
const startsWithSchemeRegexp = /^[a-z][a-z0-9+.-]*:/i;
|
||||
export const isAbsoluteURL = (url) => {
|
||||
return startsWithSchemeRegexp.test(url);
|
||||
};
|
||||
export let isArray = (val) => ((isArray = Array.isArray), isArray(val));
|
||||
export let isReadonlyArray = isArray;
|
||||
/** Returns an object if the given value isn't an object, otherwise returns as-is */
|
||||
export function maybeObj(x) {
|
||||
if (typeof x !== 'object') {
|
||||
return {};
|
||||
}
|
||||
return x ?? {};
|
||||
}
|
||||
// https://stackoverflow.com/a/34491287
|
||||
export function isEmptyObj(obj) {
|
||||
if (!obj)
|
||||
return true;
|
||||
for (const _k in obj)
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
// https://eslint.org/docs/latest/rules/no-prototype-builtins
|
||||
export function hasOwn(obj, key) {
|
||||
return Object.prototype.hasOwnProperty.call(obj, key);
|
||||
}
|
||||
export function isObj(obj) {
|
||||
return obj != null && typeof obj === 'object' && !Array.isArray(obj);
|
||||
}
|
||||
export const ensurePresent = (value) => {
|
||||
if (value == null) {
|
||||
throw new AnthropicError(`Expected a value to be given but received ${value} instead.`);
|
||||
}
|
||||
return value;
|
||||
};
|
||||
export const validatePositiveInteger = (name, n) => {
|
||||
if (typeof n !== 'number' || !Number.isInteger(n)) {
|
||||
throw new AnthropicError(`${name} must be an integer`);
|
||||
}
|
||||
if (n < 0) {
|
||||
throw new AnthropicError(`${name} must be a positive integer`);
|
||||
}
|
||||
return n;
|
||||
};
|
||||
export const coerceInteger = (value) => {
|
||||
if (typeof value === 'number')
|
||||
return Math.round(value);
|
||||
if (typeof value === 'string')
|
||||
return parseInt(value, 10);
|
||||
throw new AnthropicError(`Could not coerce ${value} (type: ${typeof value}) into a number`);
|
||||
};
|
||||
export const coerceFloat = (value) => {
|
||||
if (typeof value === 'number')
|
||||
return value;
|
||||
if (typeof value === 'string')
|
||||
return parseFloat(value);
|
||||
throw new AnthropicError(`Could not coerce ${value} (type: ${typeof value}) into a number`);
|
||||
};
|
||||
export const coerceBoolean = (value) => {
|
||||
if (typeof value === 'boolean')
|
||||
return value;
|
||||
if (typeof value === 'string')
|
||||
return value === 'true';
|
||||
return Boolean(value);
|
||||
};
|
||||
export const maybeCoerceInteger = (value) => {
|
||||
if (value == null) {
|
||||
return undefined;
|
||||
}
|
||||
return coerceInteger(value);
|
||||
};
|
||||
export const maybeCoerceFloat = (value) => {
|
||||
if (value == null) {
|
||||
return undefined;
|
||||
}
|
||||
return coerceFloat(value);
|
||||
};
|
||||
export const maybeCoerceBoolean = (value) => {
|
||||
if (value == null) {
|
||||
return undefined;
|
||||
}
|
||||
return coerceBoolean(value);
|
||||
};
|
||||
export const safeJSON = (text) => {
|
||||
try {
|
||||
return JSON.parse(text);
|
||||
}
|
||||
catch (err) {
|
||||
return undefined;
|
||||
}
|
||||
};
|
||||
//# sourceMappingURL=values.mjs.map
|
||||
98
extracted-source/node_modules/@anthropic-ai/foundry-sdk/client.mjs
generated
vendored
Normal file
98
extracted-source/node_modules/@anthropic-ai/foundry-sdk/client.mjs
generated
vendored
Normal file
@@ -0,0 +1,98 @@
|
||||
import { buildHeaders } from "./internal/headers.mjs";
|
||||
import * as Errors from "./core/error.mjs";
|
||||
import { readEnv } from "./internal/utils.mjs";
|
||||
import { Anthropic } from '@anthropic-ai/sdk/client';
|
||||
export { BaseAnthropic } from '@anthropic-ai/sdk/client';
|
||||
import * as Resources from '@anthropic-ai/sdk/resources/index';
|
||||
/** API Client for interfacing with the Anthropic Foundry API. */
|
||||
export class AnthropicFoundry extends Anthropic {
|
||||
/**
|
||||
* API Client for interfacing with the Anthropic Foundry API.
|
||||
*
|
||||
* @param {string | undefined} [opts.resource=process.env['ANTHROPIC_FOUNDRY_RESOURCE'] ?? undefined] - Your Foundry resource name
|
||||
* @param {string | undefined} [opts.apiKey=process.env['ANTHROPIC_FOUNDRY_API_KEY'] ?? undefined]
|
||||
* @param {string | null | undefined} [opts.organization=process.env['ANTHROPIC_ORG_ID'] ?? null]
|
||||
* @param {string} [opts.baseURL=process.env['ANTHROPIC_FOUNDRY_BASE_URL']] - Sets the base URL for the API, e.g. `https://example-resource.azure.anthropic.com/anthropic/`.
|
||||
* @param {number} [opts.timeout=10 minutes] - The maximum amount of time (in milliseconds) the client will wait for a response before timing out.
|
||||
* @param {number} [opts.httpAgent] - An HTTP agent used to manage HTTP(s) connections.
|
||||
* @param {Fetch} [opts.fetch] - Specify a custom `fetch` function implementation.
|
||||
* @param {number} [opts.maxRetries=2] - The maximum number of times the client will retry a request.
|
||||
* @param {Headers} opts.defaultHeaders - Default headers to include with every request to the API.
|
||||
* @param {DefaultQuery} opts.defaultQuery - Default query parameters to include with every request to the API.
|
||||
* @param {boolean} [opts.dangerouslyAllowBrowser=false] - By default, client-side use of this library is not allowed, as it risks exposing your secret API credentials to attackers.
|
||||
*/
|
||||
constructor({ baseURL = readEnv('ANTHROPIC_FOUNDRY_BASE_URL'), apiKey = readEnv('ANTHROPIC_FOUNDRY_API_KEY'), resource = readEnv('ANTHROPIC_FOUNDRY_RESOURCE'), azureADTokenProvider, dangerouslyAllowBrowser, ...opts } = {}) {
|
||||
if (typeof azureADTokenProvider === 'function') {
|
||||
dangerouslyAllowBrowser = true;
|
||||
}
|
||||
if (!azureADTokenProvider && !apiKey) {
|
||||
throw new Errors.AnthropicError('Missing credentials. Please pass one of `apiKey` and `azureTokenProvider`, or set the `ANTHROPIC_FOUNDRY_API_KEY` environment variable.');
|
||||
}
|
||||
if (azureADTokenProvider && apiKey) {
|
||||
throw new Errors.AnthropicError('The `apiKey` and `azureADTokenProvider` arguments are mutually exclusive; only one can be passed at a time.');
|
||||
}
|
||||
if (!baseURL) {
|
||||
if (!resource) {
|
||||
throw new Errors.AnthropicError('Must provide one of the `baseURL` or `resource` arguments, or the `ANTHROPIC_FOUNDRY_RESOURCE` environment variable');
|
||||
}
|
||||
baseURL = `https://${resource}.services.ai.azure.com/anthropic/`;
|
||||
}
|
||||
else {
|
||||
if (resource) {
|
||||
throw new Errors.AnthropicError('baseURL and resource are mutually exclusive');
|
||||
}
|
||||
}
|
||||
super({
|
||||
apiKey: azureADTokenProvider ?? apiKey,
|
||||
baseURL,
|
||||
...opts,
|
||||
...(dangerouslyAllowBrowser !== undefined ? { dangerouslyAllowBrowser } : {}),
|
||||
});
|
||||
this.resource = null;
|
||||
// @ts-expect-error are using a different Messages type that omits batches
|
||||
this.messages = makeMessagesResource(this);
|
||||
// @ts-expect-error are using a different Beta type that omits batches
|
||||
this.beta = makeBetaResource(this);
|
||||
// @ts-expect-error Anthropic Foundry does not support models endpoint
|
||||
this.models = undefined;
|
||||
}
|
||||
async authHeaders() {
|
||||
if (typeof this._options.apiKey === 'function') {
|
||||
let token;
|
||||
try {
|
||||
token = await this._options.apiKey();
|
||||
}
|
||||
catch (err) {
|
||||
if (err instanceof Errors.AnthropicError)
|
||||
throw err;
|
||||
throw new Errors.AnthropicError(`Failed to get token from azureADTokenProvider: ${err.message}`,
|
||||
// @ts-ignore
|
||||
{ cause: err });
|
||||
}
|
||||
if (typeof token !== 'string' || !token) {
|
||||
throw new Errors.AnthropicError(`Expected azureADTokenProvider function argument to return a string but it returned ${token}`);
|
||||
}
|
||||
return buildHeaders([{ Authorization: `Bearer ${token}` }]);
|
||||
}
|
||||
if (typeof this._options.apiKey === 'string') {
|
||||
return buildHeaders([{ 'x-api-key': this.apiKey }]);
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
validateHeaders() {
|
||||
return;
|
||||
}
|
||||
}
|
||||
function makeMessagesResource(client) {
|
||||
const resource = new Resources.Messages(client);
|
||||
// @ts-expect-error we're deleting non-optional properties
|
||||
delete resource.batches;
|
||||
return resource;
|
||||
}
|
||||
function makeBetaResource(client) {
|
||||
const resource = new Resources.Beta(client);
|
||||
// @ts-expect-error we're deleting non-optional properties
|
||||
delete resource.messages.batches;
|
||||
return resource;
|
||||
}
|
||||
//# sourceMappingURL=client.mjs.map
|
||||
2
extracted-source/node_modules/@anthropic-ai/foundry-sdk/core/error.mjs
generated
vendored
Normal file
2
extracted-source/node_modules/@anthropic-ai/foundry-sdk/core/error.mjs
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
export * from '@anthropic-ai/sdk/core/error';
|
||||
//# sourceMappingURL=error.mjs.map
|
||||
3
extracted-source/node_modules/@anthropic-ai/foundry-sdk/index.mjs
generated
vendored
Normal file
3
extracted-source/node_modules/@anthropic-ai/foundry-sdk/index.mjs
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
export * from "./client.mjs";
|
||||
export { AnthropicFoundry as default } from "./client.mjs";
|
||||
//# sourceMappingURL=index.mjs.map
|
||||
74
extracted-source/node_modules/@anthropic-ai/foundry-sdk/internal/headers.mjs
generated
vendored
Normal file
74
extracted-source/node_modules/@anthropic-ai/foundry-sdk/internal/headers.mjs
generated
vendored
Normal file
@@ -0,0 +1,74 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { isReadonlyArray } from "./utils/values.mjs";
|
||||
const brand_privateNullableHeaders = Symbol.for('brand.privateNullableHeaders');
|
||||
function* iterateHeaders(headers) {
|
||||
if (!headers)
|
||||
return;
|
||||
if (brand_privateNullableHeaders in headers) {
|
||||
const { values, nulls } = headers;
|
||||
yield* values.entries();
|
||||
for (const name of nulls) {
|
||||
yield [name, null];
|
||||
}
|
||||
return;
|
||||
}
|
||||
let shouldClear = false;
|
||||
let iter;
|
||||
if (headers instanceof Headers) {
|
||||
iter = headers.entries();
|
||||
}
|
||||
else if (isReadonlyArray(headers)) {
|
||||
iter = headers;
|
||||
}
|
||||
else {
|
||||
shouldClear = true;
|
||||
iter = Object.entries(headers ?? {});
|
||||
}
|
||||
for (let row of iter) {
|
||||
const name = row[0];
|
||||
if (typeof name !== 'string')
|
||||
throw new TypeError('expected header name to be a string');
|
||||
const values = isReadonlyArray(row[1]) ? row[1] : [row[1]];
|
||||
let didClear = false;
|
||||
for (const value of values) {
|
||||
if (value === undefined)
|
||||
continue;
|
||||
// Objects keys always overwrite older headers, they never append.
|
||||
// Yield a null to clear the header before adding the new values.
|
||||
if (shouldClear && !didClear) {
|
||||
didClear = true;
|
||||
yield [name, null];
|
||||
}
|
||||
yield [name, value];
|
||||
}
|
||||
}
|
||||
}
|
||||
export const buildHeaders = (newHeaders) => {
|
||||
const targetHeaders = new Headers();
|
||||
const nullHeaders = new Set();
|
||||
for (const headers of newHeaders) {
|
||||
const seenHeaders = new Set();
|
||||
for (const [name, value] of iterateHeaders(headers)) {
|
||||
const lowerName = name.toLowerCase();
|
||||
if (!seenHeaders.has(lowerName)) {
|
||||
targetHeaders.delete(name);
|
||||
seenHeaders.add(lowerName);
|
||||
}
|
||||
if (value === null) {
|
||||
targetHeaders.delete(name);
|
||||
nullHeaders.add(lowerName);
|
||||
}
|
||||
else {
|
||||
targetHeaders.append(name, value);
|
||||
nullHeaders.delete(lowerName);
|
||||
}
|
||||
}
|
||||
}
|
||||
return { [brand_privateNullableHeaders]: true, values: targetHeaders, nulls: nullHeaders };
|
||||
};
|
||||
export const isEmptyHeaders = (headers) => {
|
||||
for (const _ of iterateHeaders(headers))
|
||||
return false;
|
||||
return true;
|
||||
};
|
||||
//# sourceMappingURL=headers.mjs.map
|
||||
8
extracted-source/node_modules/@anthropic-ai/foundry-sdk/internal/utils.mjs
generated
vendored
Normal file
8
extracted-source/node_modules/@anthropic-ai/foundry-sdk/internal/utils.mjs
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
export * from "./utils/values.mjs";
|
||||
export * from "./utils/base64.mjs";
|
||||
export * from "./utils/env.mjs";
|
||||
export * from "./utils/log.mjs";
|
||||
export * from "./utils/uuid.mjs";
|
||||
export * from "./utils/sleep.mjs";
|
||||
//# sourceMappingURL=utils.mjs.map
|
||||
33
extracted-source/node_modules/@anthropic-ai/foundry-sdk/internal/utils/base64.mjs
generated
vendored
Normal file
33
extracted-source/node_modules/@anthropic-ai/foundry-sdk/internal/utils/base64.mjs
generated
vendored
Normal file
@@ -0,0 +1,33 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { AnthropicError } from "../../core/error.mjs";
|
||||
import { encodeUTF8 } from "./bytes.mjs";
|
||||
export const toBase64 = (data) => {
|
||||
if (!data)
|
||||
return '';
|
||||
if (typeof globalThis.Buffer !== 'undefined') {
|
||||
return globalThis.Buffer.from(data).toString('base64');
|
||||
}
|
||||
if (typeof data === 'string') {
|
||||
data = encodeUTF8(data);
|
||||
}
|
||||
if (typeof btoa !== 'undefined') {
|
||||
return btoa(String.fromCharCode.apply(null, data));
|
||||
}
|
||||
throw new AnthropicError('Cannot generate base64 string; Expected `Buffer` or `btoa` to be defined');
|
||||
};
|
||||
export const fromBase64 = (str) => {
|
||||
if (typeof globalThis.Buffer !== 'undefined') {
|
||||
const buf = globalThis.Buffer.from(str, 'base64');
|
||||
return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength);
|
||||
}
|
||||
if (typeof atob !== 'undefined') {
|
||||
const bstr = atob(str);
|
||||
const buf = new Uint8Array(bstr.length);
|
||||
for (let i = 0; i < bstr.length; i++) {
|
||||
buf[i] = bstr.charCodeAt(i);
|
||||
}
|
||||
return buf;
|
||||
}
|
||||
throw new AnthropicError('Cannot decode base64 string; Expected `Buffer` or `atob` to be defined');
|
||||
};
|
||||
//# sourceMappingURL=base64.mjs.map
|
||||
18
extracted-source/node_modules/@anthropic-ai/foundry-sdk/internal/utils/env.mjs
generated
vendored
Normal file
18
extracted-source/node_modules/@anthropic-ai/foundry-sdk/internal/utils/env.mjs
generated
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
/**
|
||||
* Read an environment variable.
|
||||
*
|
||||
* Trims beginning and trailing whitespace.
|
||||
*
|
||||
* Will return undefined if the environment variable doesn't exist or cannot be accessed.
|
||||
*/
|
||||
export const readEnv = (env) => {
|
||||
if (typeof globalThis.process !== 'undefined') {
|
||||
return globalThis.process.env?.[env]?.trim() ?? undefined;
|
||||
}
|
||||
if (typeof globalThis.Deno !== 'undefined') {
|
||||
return globalThis.Deno.env?.get?.(env)?.trim();
|
||||
}
|
||||
return undefined;
|
||||
};
|
||||
//# sourceMappingURL=env.mjs.map
|
||||
80
extracted-source/node_modules/@anthropic-ai/foundry-sdk/internal/utils/log.mjs
generated
vendored
Normal file
80
extracted-source/node_modules/@anthropic-ai/foundry-sdk/internal/utils/log.mjs
generated
vendored
Normal file
@@ -0,0 +1,80 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { hasOwn } from "./values.mjs";
|
||||
const levelNumbers = {
|
||||
off: 0,
|
||||
error: 200,
|
||||
warn: 300,
|
||||
info: 400,
|
||||
debug: 500,
|
||||
};
|
||||
export const parseLogLevel = (maybeLevel, sourceName, client) => {
|
||||
if (!maybeLevel) {
|
||||
return undefined;
|
||||
}
|
||||
if (hasOwn(levelNumbers, maybeLevel)) {
|
||||
return maybeLevel;
|
||||
}
|
||||
loggerFor(client).warn(`${sourceName} was set to ${JSON.stringify(maybeLevel)}, expected one of ${JSON.stringify(Object.keys(levelNumbers))}`);
|
||||
return undefined;
|
||||
};
|
||||
function noop() { }
|
||||
function makeLogFn(fnLevel, logger, logLevel) {
|
||||
if (!logger || levelNumbers[fnLevel] > levelNumbers[logLevel]) {
|
||||
return noop;
|
||||
}
|
||||
else {
|
||||
// Don't wrap logger functions, we want the stacktrace intact!
|
||||
return logger[fnLevel].bind(logger);
|
||||
}
|
||||
}
|
||||
const noopLogger = {
|
||||
error: noop,
|
||||
warn: noop,
|
||||
info: noop,
|
||||
debug: noop,
|
||||
};
|
||||
let cachedLoggers = /* @__PURE__ */ new WeakMap();
|
||||
export function loggerFor(client) {
|
||||
const logger = client.logger;
|
||||
const logLevel = client.logLevel ?? 'off';
|
||||
if (!logger) {
|
||||
return noopLogger;
|
||||
}
|
||||
const cachedLogger = cachedLoggers.get(logger);
|
||||
if (cachedLogger && cachedLogger[0] === logLevel) {
|
||||
return cachedLogger[1];
|
||||
}
|
||||
const levelLogger = {
|
||||
error: makeLogFn('error', logger, logLevel),
|
||||
warn: makeLogFn('warn', logger, logLevel),
|
||||
info: makeLogFn('info', logger, logLevel),
|
||||
debug: makeLogFn('debug', logger, logLevel),
|
||||
};
|
||||
cachedLoggers.set(logger, [logLevel, levelLogger]);
|
||||
return levelLogger;
|
||||
}
|
||||
export const formatRequestDetails = (details) => {
|
||||
if (details.options) {
|
||||
details.options = { ...details.options };
|
||||
delete details.options['headers']; // redundant + leaks internals
|
||||
}
|
||||
if (details.headers) {
|
||||
details.headers = Object.fromEntries((details.headers instanceof Headers ? [...details.headers] : Object.entries(details.headers)).map(([name, value]) => [
|
||||
name,
|
||||
(name.toLowerCase() === 'x-api-key' ||
|
||||
name.toLowerCase() === 'authorization' ||
|
||||
name.toLowerCase() === 'cookie' ||
|
||||
name.toLowerCase() === 'set-cookie') ?
|
||||
'***'
|
||||
: value,
|
||||
]));
|
||||
}
|
||||
if ('retryOfRequestLogID' in details) {
|
||||
if (details.retryOfRequestLogID) {
|
||||
details.retryOf = details.retryOfRequestLogID;
|
||||
}
|
||||
delete details.retryOfRequestLogID;
|
||||
}
|
||||
return details;
|
||||
};
|
||||
//# sourceMappingURL=log.mjs.map
|
||||
100
extracted-source/node_modules/@anthropic-ai/foundry-sdk/internal/utils/values.mjs
generated
vendored
Normal file
100
extracted-source/node_modules/@anthropic-ai/foundry-sdk/internal/utils/values.mjs
generated
vendored
Normal file
@@ -0,0 +1,100 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { AnthropicError } from "../../core/error.mjs";
|
||||
// https://url.spec.whatwg.org/#url-scheme-string
|
||||
const startsWithSchemeRegexp = /^[a-z][a-z0-9+.-]*:/i;
|
||||
export const isAbsoluteURL = (url) => {
|
||||
return startsWithSchemeRegexp.test(url);
|
||||
};
|
||||
export let isArray = (val) => ((isArray = Array.isArray), isArray(val));
|
||||
export let isReadonlyArray = isArray;
|
||||
/** Returns an object if the given value isn't an object, otherwise returns as-is */
|
||||
export function maybeObj(x) {
|
||||
if (typeof x !== 'object') {
|
||||
return {};
|
||||
}
|
||||
return x ?? {};
|
||||
}
|
||||
// https://stackoverflow.com/a/34491287
|
||||
export function isEmptyObj(obj) {
|
||||
if (!obj)
|
||||
return true;
|
||||
for (const _k in obj)
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
// https://eslint.org/docs/latest/rules/no-prototype-builtins
|
||||
export function hasOwn(obj, key) {
|
||||
return Object.prototype.hasOwnProperty.call(obj, key);
|
||||
}
|
||||
export function isObj(obj) {
|
||||
return obj != null && typeof obj === 'object' && !Array.isArray(obj);
|
||||
}
|
||||
export const ensurePresent = (value) => {
|
||||
if (value == null) {
|
||||
throw new AnthropicError(`Expected a value to be given but received ${value} instead.`);
|
||||
}
|
||||
return value;
|
||||
};
|
||||
export const validatePositiveInteger = (name, n) => {
|
||||
if (typeof n !== 'number' || !Number.isInteger(n)) {
|
||||
throw new AnthropicError(`${name} must be an integer`);
|
||||
}
|
||||
if (n < 0) {
|
||||
throw new AnthropicError(`${name} must be a positive integer`);
|
||||
}
|
||||
return n;
|
||||
};
|
||||
export const coerceInteger = (value) => {
|
||||
if (typeof value === 'number')
|
||||
return Math.round(value);
|
||||
if (typeof value === 'string')
|
||||
return parseInt(value, 10);
|
||||
throw new AnthropicError(`Could not coerce ${value} (type: ${typeof value}) into a number`);
|
||||
};
|
||||
export const coerceFloat = (value) => {
|
||||
if (typeof value === 'number')
|
||||
return value;
|
||||
if (typeof value === 'string')
|
||||
return parseFloat(value);
|
||||
throw new AnthropicError(`Could not coerce ${value} (type: ${typeof value}) into a number`);
|
||||
};
|
||||
export const coerceBoolean = (value) => {
|
||||
if (typeof value === 'boolean')
|
||||
return value;
|
||||
if (typeof value === 'string')
|
||||
return value === 'true';
|
||||
return Boolean(value);
|
||||
};
|
||||
export const maybeCoerceInteger = (value) => {
|
||||
if (value == null) {
|
||||
return undefined;
|
||||
}
|
||||
return coerceInteger(value);
|
||||
};
|
||||
export const maybeCoerceFloat = (value) => {
|
||||
if (value == null) {
|
||||
return undefined;
|
||||
}
|
||||
return coerceFloat(value);
|
||||
};
|
||||
export const maybeCoerceBoolean = (value) => {
|
||||
if (value == null) {
|
||||
return undefined;
|
||||
}
|
||||
return coerceBoolean(value);
|
||||
};
|
||||
export const safeJSON = (text) => {
|
||||
try {
|
||||
return JSON.parse(text);
|
||||
}
|
||||
catch (err) {
|
||||
return undefined;
|
||||
}
|
||||
};
|
||||
// Gets a value from an object, deletes the key, and returns the value (or undefined if not found)
|
||||
export const pop = (obj, key) => {
|
||||
const value = obj[key];
|
||||
delete obj[key];
|
||||
return value;
|
||||
};
|
||||
//# sourceMappingURL=values.mjs.map
|
||||
705
extracted-source/node_modules/@anthropic-ai/mcpb/dist/cli/init.js
generated
vendored
Normal file
705
extracted-source/node_modules/@anthropic-ai/mcpb/dist/cli/init.js
generated
vendored
Normal file
@@ -0,0 +1,705 @@
|
||||
import { confirm, input, select } from "@inquirer/prompts";
|
||||
import { existsSync, readFileSync, writeFileSync } from "fs";
|
||||
import { basename, join, resolve } from "path";
|
||||
import { CURRENT_MANIFEST_VERSION } from "../schemas.js";
|
||||
export function readPackageJson(dirPath) {
|
||||
const packageJsonPath = join(dirPath, "package.json");
|
||||
if (existsSync(packageJsonPath)) {
|
||||
try {
|
||||
return JSON.parse(readFileSync(packageJsonPath, "utf-8"));
|
||||
}
|
||||
catch (e) {
|
||||
// Ignore package.json parsing errors
|
||||
}
|
||||
}
|
||||
return {};
|
||||
}
|
||||
export function getDefaultAuthorName(packageData) {
|
||||
if (typeof packageData.author === "string") {
|
||||
return packageData.author;
|
||||
}
|
||||
return packageData.author?.name || "";
|
||||
}
|
||||
export function getDefaultAuthorEmail(packageData) {
|
||||
if (typeof packageData.author === "object") {
|
||||
return packageData.author?.email || "";
|
||||
}
|
||||
return "";
|
||||
}
|
||||
export function getDefaultAuthorUrl(packageData) {
|
||||
if (typeof packageData.author === "object") {
|
||||
return packageData.author?.url || "";
|
||||
}
|
||||
return "";
|
||||
}
|
||||
export function getDefaultRepositoryUrl(packageData) {
|
||||
if (typeof packageData.repository === "string") {
|
||||
return packageData.repository;
|
||||
}
|
||||
return packageData.repository?.url || "";
|
||||
}
|
||||
export function getDefaultBasicInfo(packageData, resolvedPath) {
|
||||
const name = packageData.name || basename(resolvedPath);
|
||||
const authorName = getDefaultAuthorName(packageData) || "Unknown Author";
|
||||
const displayName = name;
|
||||
const version = packageData.version || "1.0.0";
|
||||
const description = packageData.description || "A MCPB bundle";
|
||||
return { name, authorName, displayName, version, description };
|
||||
}
|
||||
export function getDefaultAuthorInfo(packageData) {
|
||||
return {
|
||||
authorEmail: getDefaultAuthorEmail(packageData),
|
||||
authorUrl: getDefaultAuthorUrl(packageData),
|
||||
};
|
||||
}
|
||||
export function getDefaultServerConfig(packageData) {
|
||||
const serverType = "node";
|
||||
const entryPoint = getDefaultEntryPoint(serverType, packageData);
|
||||
const mcp_config = createMcpConfig(serverType, entryPoint);
|
||||
return { serverType, entryPoint, mcp_config };
|
||||
}
|
||||
export function getDefaultOptionalFields(packageData) {
|
||||
return {
|
||||
keywords: "",
|
||||
license: packageData.license || "MIT",
|
||||
repository: undefined,
|
||||
};
|
||||
}
|
||||
export function createMcpConfig(serverType, entryPoint) {
|
||||
switch (serverType) {
|
||||
case "node":
|
||||
return {
|
||||
command: "node",
|
||||
args: ["${__dirname}/" + entryPoint],
|
||||
env: {},
|
||||
};
|
||||
case "python":
|
||||
return {
|
||||
command: "python",
|
||||
args: ["${__dirname}/" + entryPoint],
|
||||
env: {
|
||||
PYTHONPATH: "${__dirname}/server/lib",
|
||||
},
|
||||
};
|
||||
case "binary":
|
||||
return {
|
||||
command: "${__dirname}/" + entryPoint,
|
||||
args: [],
|
||||
env: {},
|
||||
};
|
||||
}
|
||||
}
|
||||
export function getDefaultEntryPoint(serverType, packageData) {
|
||||
switch (serverType) {
|
||||
case "node":
|
||||
return packageData?.main || "server/index.js";
|
||||
case "python":
|
||||
return "server/main.py";
|
||||
case "binary":
|
||||
return "server/my-server";
|
||||
}
|
||||
}
|
||||
export async function promptBasicInfo(packageData, resolvedPath) {
|
||||
const defaultName = packageData.name || basename(resolvedPath);
|
||||
const name = await input({
|
||||
message: "Extension name:",
|
||||
default: defaultName,
|
||||
validate: (value) => value.trim().length > 0 || "Name is required",
|
||||
});
|
||||
const authorName = await input({
|
||||
message: "Author name:",
|
||||
default: getDefaultAuthorName(packageData),
|
||||
validate: (value) => value.trim().length > 0 || "Author name is required",
|
||||
});
|
||||
const displayName = await input({
|
||||
message: "Display name (optional):",
|
||||
default: name,
|
||||
});
|
||||
const version = await input({
|
||||
message: "Version:",
|
||||
default: packageData.version || "1.0.0",
|
||||
validate: (value) => {
|
||||
if (!value.trim())
|
||||
return "Version is required";
|
||||
if (!/^\d+\.\d+\.\d+/.test(value)) {
|
||||
return "Version must follow semantic versioning (e.g., 1.0.0)";
|
||||
}
|
||||
return true;
|
||||
},
|
||||
});
|
||||
const description = await input({
|
||||
message: "Description:",
|
||||
default: packageData.description || "",
|
||||
validate: (value) => value.trim().length > 0 || "Description is required",
|
||||
});
|
||||
return { name, authorName, displayName, version, description };
|
||||
}
|
||||
/**
 * Ask for optional author contact details (email and URL), defaulting
 * from whatever package.json provides.
 *
 * @param {object} packageData - Parsed package.json contents.
 * @returns {Promise<{authorEmail: string, authorUrl: string}>}
 */
export async function promptAuthorInfo(packageData) {
    const result = {};
    result.authorEmail = await input({
        message: "Author email (optional):",
        default: getDefaultAuthorEmail(packageData),
    });
    result.authorUrl = await input({
        message: "Author URL (optional):",
        default: getDefaultAuthorUrl(packageData),
    });
    return result;
}
|
||||
/**
 * Prompt for the server type and entry point, then derive the MCP launch
 * configuration from those answers via createMcpConfig.
 *
 * @param {object} packageData - Parsed package.json contents, used only to
 *   seed the default entry point for Node.js servers.
 * @returns {Promise<{serverType: string, entryPoint: string, mcp_config: object}>}
 */
export async function promptServerConfig(packageData) {
    const serverType = (await select({
        message: "Server type:",
        choices: [
            { name: "Node.js", value: "node" },
            { name: "Python", value: "python" },
            { name: "Binary", value: "binary" },
        ],
        default: "node",
    }));
    const entryPoint = await input({
        message: "Entry point:",
        default: getDefaultEntryPoint(serverType, packageData),
    });
    // snake_case key matches the manifest schema's "mcp_config" field.
    const mcp_config = createMcpConfig(serverType, entryPoint);
    return { serverType, entryPoint, mcp_config };
}
|
||||
/**
 * Optionally collect a list of advertised tools (name + optional
 * description) and whether the server also generates tools at runtime.
 *
 * @returns {Promise<{tools: Array<{name: string, description?: string}>, toolsGenerated: boolean}>}
 *   Empty tools list and false flag when the user declines.
 */
export async function promptTools() {
    const addTools = await confirm({
        message: "Does your MCP Server provide tools you want to advertise (optional)?",
        default: true,
    });
    const tools = [];
    let toolsGenerated = false;
    if (addTools) {
        // Collect tools one at a time until the user stops.
        let addMore = true;
        while (addMore) {
            const toolName = await input({
                message: "Tool name:",
                validate: (value) => value.trim().length > 0 || "Tool name is required",
            });
            const toolDescription = await input({
                message: "Tool description (optional):",
            });
            // Omit the description key entirely when left blank.
            tools.push({
                name: toolName,
                ...(toolDescription ? { description: toolDescription } : {}),
            });
            addMore = await confirm({
                message: "Add another tool?",
                default: false,
            });
        }
        // Ask about generated tools
        toolsGenerated = await confirm({
            message: "Does your server generate additional tools at runtime?",
            default: false,
        });
    }
    return { tools, toolsGenerated };
}
|
||||
/**
 * Optionally collect a list of advertised prompts, each with an optional
 * description, optional argument names, and a required text template, plus
 * whether the server also generates prompts at runtime.
 *
 * @returns {Promise<{prompts: Array<object>, promptsGenerated: boolean}>}
 *   Empty prompts list and false flag when the user declines.
 */
export async function promptPrompts() {
    const addPrompts = await confirm({
        message: "Does your MCP Server provide prompts you want to advertise (optional)?",
        default: false,
    });
    const prompts = [];
    let promptsGenerated = false;
    if (addPrompts) {
        // Outer loop: one iteration per prompt definition.
        let addMore = true;
        while (addMore) {
            const promptName = await input({
                message: "Prompt name:",
                validate: (value) => value.trim().length > 0 || "Prompt name is required",
            });
            const promptDescription = await input({
                message: "Prompt description (optional):",
            });
            // Ask about arguments
            const hasArguments = await confirm({
                message: "Does this prompt have arguments?",
                default: false,
            });
            const argumentNames = [];
            if (hasArguments) {
                // Inner loop: collect unique argument names for this prompt.
                let addMoreArgs = true;
                while (addMoreArgs) {
                    const argName = await input({
                        message: "Argument name:",
                        validate: (value) => {
                            if (!value.trim())
                                return "Argument name is required";
                            // Names already collected for THIS prompt must not repeat.
                            if (argumentNames.includes(value)) {
                                return "Argument names must be unique";
                            }
                            return true;
                        },
                    });
                    argumentNames.push(argName);
                    addMoreArgs = await confirm({
                        message: "Add another argument?",
                        default: false,
                    });
                }
            }
            // Prompt for the text template
            const promptText = await input({
                // When arguments exist, remind the user of the ${arguments.name}
                // interpolation syntax and list the names they just entered.
                message: hasArguments
                    ? `Prompt text (use \${arguments.name} for arguments: ${argumentNames.join(", ")}):`
                    : "Prompt text:",
                validate: (value) => value.trim().length > 0 || "Prompt text is required",
            });
            // Optional keys are omitted entirely when unset/empty.
            prompts.push({
                name: promptName,
                ...(promptDescription ? { description: promptDescription } : {}),
                ...(argumentNames.length > 0 ? { arguments: argumentNames } : {}),
                text: promptText,
            });
            addMore = await confirm({
                message: "Add another prompt?",
                default: false,
            });
        }
        // Ask about generated prompts
        promptsGenerated = await confirm({
            message: "Does your server generate additional prompts at runtime?",
            default: false,
        });
    }
    return { prompts, promptsGenerated };
}
|
||||
/**
 * Collect optional manifest metadata: keywords (raw comma-separated
 * string, split later by buildManifest), license, and repository info.
 *
 * @param {object} packageData - Parsed package.json contents, used for
 *   license and repository defaults.
 * @returns {Promise<{keywords: string, license: string, repository: {type: string, url: string} | undefined}>}
 */
export async function promptOptionalFields(packageData) {
    const keywords = await input({
        message: "Keywords (comma-separated, optional):",
        default: "",
    });
    const license = await input({
        message: "License:",
        default: packageData.license || "MIT",
    });
    // Default to "yes" only when package.json already declares a repository.
    const addRepository = await confirm({
        message: "Add repository information?",
        default: !!packageData.repository,
    });
    let repository;
    if (addRepository) {
        const repoUrl = await input({
            message: "Repository URL:",
            default: getDefaultRepositoryUrl(packageData),
        });
        // A blank answer leaves repository undefined (omitted from manifest).
        if (repoUrl) {
            repository = {
                type: "git",
                url: repoUrl,
            };
        }
    }
    return { keywords, license, repository };
}
|
||||
/**
 * Optionally ask for a detailed long description, pre-filled with the
 * short description.
 *
 * @param {string} description - Short description used as the default.
 * @returns {Promise<string | undefined>} The long description, or
 *   undefined when the user declines.
 */
export async function promptLongDescription(description) {
    const wantsLongDescription = await confirm({
        message: "Add a detailed long description?",
        default: false,
    });
    // Guard clause: nothing more to ask when the user declines.
    if (!wantsLongDescription) {
        return undefined;
    }
    return await input({
        message: "Long description (supports basic markdown):",
        default: description,
    });
}
|
||||
/**
 * Collect optional homepage, documentation, and support URLs.
 *
 * Each field accepts a blank answer (the field is optional); non-blank
 * answers must parse as a valid URL.
 *
 * @returns {Promise<{homepage: string, documentation: string, support: string}>}
 *   Empty strings for fields the user skipped.
 */
export async function promptUrls() {
    // The same optional-URL validation was previously copy-pasted three
    // times with only the error message differing; build it once instead
    // so the three prompts cannot drift apart.
    const optionalUrlValidator = (errorMessage) => (value) => {
        if (!value.trim())
            return true;
        try {
            new URL(value);
            return true;
        }
        catch {
            return errorMessage;
        }
    };
    const homepage = await input({
        message: "Homepage URL (optional):",
        validate: optionalUrlValidator("Must be a valid URL (e.g., https://example.com)"),
    });
    const documentation = await input({
        message: "Documentation URL (optional):",
        validate: optionalUrlValidator("Must be a valid URL"),
    });
    const support = await input({
        message: "Support URL (optional):",
        validate: optionalUrlValidator("Must be a valid URL"),
    });
    return { homepage, documentation, support };
}
|
||||
/**
 * Collect optional visual assets: an icon path and any number of
 * screenshot paths, all relative to the manifest.
 *
 * Paths containing ".." are rejected to keep assets inside the bundle.
 *
 * @returns {Promise<{icon: string, screenshots: string[]}>} Empty string /
 *   empty array when the user skips the respective asset.
 */
export async function promptVisualAssets() {
    const icon = await input({
        message: "Icon file path (optional, relative to manifest):",
        validate: (value) => {
            // Blank is allowed — the icon is optional.
            if (!value.trim())
                return true;
            if (value.includes(".."))
                return "Relative paths cannot include '..'";
            return true;
        },
    });
    const addScreenshots = await confirm({
        message: "Add screenshots?",
        default: false,
    });
    const screenshots = [];
    if (addScreenshots) {
        // Collect screenshot paths one at a time until the user stops.
        let addMore = true;
        while (addMore) {
            const screenshot = await input({
                message: "Screenshot file path (relative to manifest):",
                validate: (value) => {
                    // Unlike the icon, once the user opts in, each entry is required.
                    if (!value.trim())
                        return "Screenshot path is required";
                    if (value.includes(".."))
                        return "Relative paths cannot include '..'";
                    return true;
                },
            });
            screenshots.push(screenshot);
            addMore = await confirm({
                message: "Add another screenshot?",
                default: false,
            });
        }
    }
    return { icon, screenshots };
}
|
||||
/**
 * Optionally collect compatibility constraints: supported platforms and
 * (for non-binary servers) a runtime version constraint.
 *
 * @param {string} serverType - "node", "python", or "binary"; binary
 *   servers are never asked for runtime constraints.
 * @returns {Promise<object | undefined>} An object with optional
 *   `platforms` and `runtimes` keys (possibly empty), or undefined when
 *   the user declines entirely.
 */
export async function promptCompatibility(serverType) {
    const addCompatibility = await confirm({
        message: "Add compatibility constraints?",
        default: false,
    });
    if (!addCompatibility) {
        return undefined;
    }
    const addPlatforms = await confirm({
        message: "Specify supported platforms?",
        default: false,
    });
    let platforms;
    if (addPlatforms) {
        // One yes/no per platform; all default to "supported".
        const selectedPlatforms = [];
        const supportsDarwin = await confirm({
            message: "Support macOS (darwin)?",
            default: true,
        });
        if (supportsDarwin)
            selectedPlatforms.push("darwin");
        const supportsWin32 = await confirm({
            message: "Support Windows (win32)?",
            default: true,
        });
        if (supportsWin32)
            selectedPlatforms.push("win32");
        const supportsLinux = await confirm({
            message: "Support Linux?",
            default: true,
        });
        if (supportsLinux)
            selectedPlatforms.push("linux");
        // Answering "no" to all three is treated as no constraint at all.
        platforms = selectedPlatforms.length > 0 ? selectedPlatforms : undefined;
    }
    let runtimes;
    if (serverType !== "binary") {
        const addRuntimes = await confirm({
            message: "Specify runtime version constraints?",
            default: false,
        });
        if (addRuntimes) {
            if (serverType === "python") {
                const pythonVersion = await input({
                    message: "Python version constraint (e.g., >=3.8,<4.0):",
                    validate: (value) => value.trim().length > 0 || "Python version constraint is required",
                });
                runtimes = { python: pythonVersion };
            }
            else if (serverType === "node") {
                const nodeVersion = await input({
                    message: "Node.js version constraint (e.g., >=16.0.0):",
                    validate: (value) => value.trim().length > 0 || "Node.js version constraint is required",
                });
                runtimes = { node: nodeVersion };
            }
        }
    }
    // Only include keys that were actually set; may return {}.
    return {
        ...(platforms ? { platforms } : {}),
        ...(runtimes ? { runtimes } : {}),
    };
}
|
||||
/**
 * Interactive wizard for the manifest's user_config section: a map of
 * option key -> option definition (type, title, description, required,
 * sensitive, optional default, optional min/max for numbers).
 *
 * @returns {Promise<Record<string, object>>} Empty object when the user
 *   declines to add any options.
 */
export async function promptUserConfig() {
    const addUserConfig = await confirm({
        message: "Add user-configurable options?",
        default: false,
    });
    if (!addUserConfig) {
        return {};
    }
    const userConfig = {};
    let addMore = true;
    while (addMore) {
        const optionKey = await input({
            message: "Configuration option key (unique identifier):",
            validate: (value) => {
                if (!value.trim())
                    return "Key is required";
                // Keys collected so far in this session must not repeat.
                if (userConfig[value])
                    return "Key must be unique";
                return true;
            },
        });
        const optionType = (await select({
            message: "Option type:",
            choices: [
                { name: "String", value: "string" },
                { name: "Number", value: "number" },
                { name: "Boolean", value: "boolean" },
                { name: "Directory", value: "directory" },
                { name: "File", value: "file" },
            ],
        }));
        const optionTitle = await input({
            message: "Option title (human-readable name):",
            validate: (value) => value.trim().length > 0 || "Title is required",
        });
        const optionDescription = await input({
            message: "Option description:",
            validate: (value) => value.trim().length > 0 || "Description is required",
        });
        const optionRequired = await confirm({
            message: "Is this option required?",
            default: false,
        });
        const optionSensitive = await confirm({
            message: "Is this option sensitive (like a password)?",
            default: false,
        });
        // Build the option object
        const option = {
            type: optionType,
            title: optionTitle,
            description: optionDescription,
            required: optionRequired,
            sensitive: optionSensitive,
        };
        // Add default value if not required
        if (!optionRequired) {
            let defaultValue;
            if (optionType === "boolean") {
                // Booleans get a yes/no prompt rather than free text.
                defaultValue = await confirm({
                    message: "Default value:",
                    default: false,
                });
            }
            else if (optionType === "number") {
                const defaultStr = await input({
                    message: "Default value (number):",
                    validate: (value) => {
                        // Blank means "no default"; otherwise must be numeric.
                        if (!value.trim())
                            return true;
                        return !isNaN(Number(value)) || "Must be a valid number";
                    },
                });
                defaultValue = defaultStr ? Number(defaultStr) : undefined;
            }
            else {
                // string / directory / file types take a free-text default.
                defaultValue = await input({
                    message: "Default value (optional):",
                });
            }
            // Skip the key for "no answer" (undefined or empty string);
            // note `false` and `0` ARE stored as explicit defaults.
            if (defaultValue !== undefined && defaultValue !== "") {
                option.default = defaultValue;
            }
        }
        // Add constraints for number types
        if (optionType === "number") {
            const addConstraints = await confirm({
                message: "Add min/max constraints?",
                default: false,
            });
            if (addConstraints) {
                const min = await input({
                    message: "Minimum value (optional):",
                    validate: (value) => {
                        if (!value.trim())
                            return true;
                        return !isNaN(Number(value)) || "Must be a valid number";
                    },
                });
                const max = await input({
                    message: "Maximum value (optional):",
                    validate: (value) => {
                        if (!value.trim())
                            return true;
                        return !isNaN(Number(value)) || "Must be a valid number";
                    },
                });
                // NOTE(review): truthiness check means an entered "0" bound is
                // dropped — presumably acceptable here, but worth confirming.
                if (min)
                    option.min = Number(min);
                if (max)
                    option.max = Number(max);
            }
        }
        userConfig[optionKey] = option;
        addMore = await confirm({
            message: "Add another configuration option?",
            default: false,
        });
    }
    return userConfig;
}
|
||||
/**
 * Assemble the final manifest object from all collected answers.
 *
 * Optional sections are spread in conditionally so that skipped answers
 * produce no key at all (rather than an empty/false value). The literal's
 * key order here is the key order serialized into manifest.json.
 *
 * @param {object} basicInfo - From promptBasicInfo / getDefaultBasicInfo.
 * @param {string | undefined} longDescription - Optional long description.
 * @param {object} authorInfo - From promptAuthorInfo.
 * @param {object} urls - { homepage, documentation, support } (may be "").
 * @param {object} visualAssets - { icon, screenshots }.
 * @param {object} serverConfig - { serverType, entryPoint, mcp_config }.
 * @param {Array} tools - Advertised tools (may be empty).
 * @param {boolean} toolsGenerated - Whether tools are generated at runtime.
 * @param {Array} prompts - Advertised prompts (may be empty).
 * @param {boolean} promptsGenerated - Whether prompts are generated at runtime.
 * @param {object | undefined} compatibility - Optional constraints.
 * @param {object} userConfig - user_config map (may be empty).
 * @param {object} optionalFields - { keywords, license, repository }.
 * @returns {object} The manifest, ready for JSON serialization.
 */
export function buildManifest(basicInfo, longDescription, authorInfo, urls, visualAssets, serverConfig, tools, toolsGenerated, prompts, promptsGenerated, compatibility, userConfig, optionalFields) {
    const { name, displayName, version, description, authorName } = basicInfo;
    const { authorEmail, authorUrl } = authorInfo;
    const { serverType, entryPoint, mcp_config } = serverConfig;
    const { keywords, license, repository } = optionalFields;
    return {
        manifest_version: CURRENT_MANIFEST_VERSION,
        name,
        // display_name is only emitted when it differs from the name.
        ...(displayName && displayName !== name
            ? { display_name: displayName }
            : {}),
        version,
        description,
        ...(longDescription ? { long_description: longDescription } : {}),
        author: {
            name: authorName,
            ...(authorEmail ? { email: authorEmail } : {}),
            ...(authorUrl ? { url: authorUrl } : {}),
        },
        ...(urls.homepage ? { homepage: urls.homepage } : {}),
        ...(urls.documentation ? { documentation: urls.documentation } : {}),
        ...(urls.support ? { support: urls.support } : {}),
        ...(visualAssets.icon ? { icon: visualAssets.icon } : {}),
        ...(visualAssets.screenshots.length > 0
            ? { screenshots: visualAssets.screenshots }
            : {}),
        server: {
            type: serverType,
            entry_point: entryPoint,
            mcp_config,
        },
        ...(tools.length > 0 ? { tools } : {}),
        ...(toolsGenerated ? { tools_generated: true } : {}),
        ...(prompts.length > 0 ? { prompts } : {}),
        ...(promptsGenerated ? { prompts_generated: true } : {}),
        ...(compatibility ? { compatibility } : {}),
        ...(Object.keys(userConfig).length > 0 ? { user_config: userConfig } : {}),
        // keywords arrive as one comma-separated string; split, trim, and
        // drop empty entries before emitting the array.
        ...(keywords
            ? {
                keywords: keywords
                    .split(",")
                    .map((k) => k.trim())
                    .filter((k) => k),
            }
            : {}),
        ...(license ? { license } : {}),
        ...(repository ? { repository } : {}),
    };
}
|
||||
/**
 * Print the post-init instructions telling the user how to finish
 * building their bundle.
 */
export function printNextSteps() {
    const lines = [
        "\nNext steps:",
        "1. Ensure all your production dependencies are in this directory",
        "2. Run 'mcpb pack' to create your .mcpb file",
    ];
    for (const line of lines) {
        console.log(line);
    }
}
|
||||
/**
 * Create a manifest.json for an MCPB extension, either interactively
 * (via the prompt* wizard functions) or with defaults derived from
 * package.json when nonInteractive is set.
 *
 * @param {string} [targetPath=process.cwd()] - Extension directory.
 * @param {boolean} [nonInteractive=false] - Skip all prompts and use defaults.
 * @returns {Promise<boolean>} true when manifest.json was written; false
 *   when the user cancelled or an existing manifest was not overwritten.
 * @throws Re-throws any error other than an inquirer "User force closed"
 *   (Ctrl-C), which is treated as a cancel.
 */
export async function initExtension(targetPath = process.cwd(), nonInteractive = false) {
    const resolvedPath = resolve(targetPath);
    const manifestPath = join(resolvedPath, "manifest.json");
    if (existsSync(manifestPath)) {
        // Non-interactive mode never overwrites silently.
        if (nonInteractive) {
            console.log("manifest.json already exists. Use --force to overwrite in non-interactive mode.");
            return false;
        }
        const overwrite = await confirm({
            message: "manifest.json already exists. Overwrite?",
            default: false,
        });
        if (!overwrite) {
            console.log("Cancelled");
            return false;
        }
    }
    if (!nonInteractive) {
        console.log("This utility will help you create a manifest.json file for your MCPB bundle.");
        console.log("Press ^C at any time to quit.\n");
    }
    else {
        console.log("Creating manifest.json with default values...");
    }
    try {
        const packageData = readPackageJson(resolvedPath);
        // Prompt for all information or use defaults
        const basicInfo = nonInteractive
            ? getDefaultBasicInfo(packageData, resolvedPath)
            : await promptBasicInfo(packageData, resolvedPath);
        const longDescription = nonInteractive
            ? undefined
            : await promptLongDescription(basicInfo.description);
        const authorInfo = nonInteractive
            ? getDefaultAuthorInfo(packageData)
            : await promptAuthorInfo(packageData);
        const urls = nonInteractive
            ? { homepage: "", documentation: "", support: "" }
            : await promptUrls();
        const visualAssets = nonInteractive
            ? { icon: "", screenshots: [] }
            : await promptVisualAssets();
        const serverConfig = nonInteractive
            ? getDefaultServerConfig(packageData)
            : await promptServerConfig(packageData);
        const toolsData = nonInteractive
            ? { tools: [], toolsGenerated: false }
            : await promptTools();
        const promptsData = nonInteractive
            ? { prompts: [], promptsGenerated: false }
            : await promptPrompts();
        const compatibility = nonInteractive
            ? undefined
            : await promptCompatibility(serverConfig.serverType);
        const userConfig = nonInteractive ? {} : await promptUserConfig();
        const optionalFields = nonInteractive
            ? getDefaultOptionalFields(packageData)
            : await promptOptionalFields(packageData);
        // Build manifest
        const manifest = buildManifest(basicInfo, longDescription, authorInfo, urls, visualAssets, serverConfig, toolsData.tools, toolsData.toolsGenerated, promptsData.prompts, promptsData.promptsGenerated, compatibility, userConfig, optionalFields);
        // Write manifest
        writeFileSync(manifestPath, JSON.stringify(manifest, null, 2) + "\n");
        console.log(`\nCreated manifest.json at ${manifestPath}`);
        printNextSteps();
        return true;
    }
    catch (error) {
        // Ctrl-C inside an inquirer prompt surfaces as this message.
        if (error instanceof Error && error.message.includes("User force closed")) {
            console.log("\nCancelled");
            return false;
        }
        throw error;
    }
}
|
||||
200
extracted-source/node_modules/@anthropic-ai/mcpb/dist/cli/pack.js
generated
vendored
Normal file
200
extracted-source/node_modules/@anthropic-ai/mcpb/dist/cli/pack.js
generated
vendored
Normal file
@@ -0,0 +1,200 @@
|
||||
import { confirm } from "@inquirer/prompts";
|
||||
import { createHash } from "crypto";
|
||||
import { zipSync } from "fflate";
|
||||
import { existsSync, mkdirSync, readFileSync, statSync, writeFileSync, } from "fs";
|
||||
import { basename, join, relative, resolve, sep } from "path";
|
||||
import { getAllFilesWithCount, readMcpbIgnorePatterns } from "../node/files.js";
|
||||
import { validateManifest } from "../node/validate.js";
|
||||
import { CURRENT_MANIFEST_VERSION, McpbManifestSchema } from "../schemas.js";
|
||||
import { getLogger } from "../shared/log.js";
|
||||
import { initExtension } from "./init.js";
|
||||
/**
 * Render a byte count as a short human-readable string: "512B",
 * "1.5kB", or "2.0MB" (one decimal place for the scaled units).
 *
 * @param {number} bytes - Size in bytes.
 * @returns {string} Formatted size string.
 */
function formatFileSize(bytes) {
    const KIB = 1024;
    const MIB = KIB * KIB;
    if (bytes < KIB) {
        return `${bytes}B`;
    }
    if (bytes < MIB) {
        return `${(bytes / KIB).toFixed(1)}kB`;
    }
    return `${(bytes / MIB).toFixed(1)}MB`;
}
|
||||
/**
 * Normalize an extension name into a filesystem-safe slug: lowercase,
 * hyphen-separated, restricted to [a-z0-9-_.], and capped at 100 chars.
 *
 * @param {string} name - Raw extension name from the manifest.
 * @returns {string} Sanitized filename fragment.
 */
function sanitizeNameForFilename(name) {
    const lowered = name.toLowerCase();
    // Whitespace runs become single hyphens.
    const hyphenated = lowered.replace(/\s+/g, "-");
    // Drop anything outside the safe character set.
    const stripped = hyphenated.replace(/[^a-z0-9-_.]/g, "");
    // Collapse hyphen runs, then trim hyphens from both ends.
    const collapsed = stripped.replace(/-+/g, "-");
    const trimmed = collapsed.replace(/^-+|-+$/g, "");
    return trimmed.substring(0, 100);
}
|
||||
/**
 * Package an extension directory into a .mcpb archive (a ZIP).
 *
 * Pipeline: validate inputs and manifest (offering to run init when
 * manifest.json is missing), collect files (honoring .mcpbignore),
 * print an npm-pack-style contents listing, zip with Unix permissions
 * preserved on non-Windows hosts, write the archive, and print a summary
 * including a SHA-1 checksum.
 *
 * @param {object} options
 * @param {string} options.extensionPath - Directory to pack.
 * @param {string} [options.outputPath] - Destination file; defaults to
 *   `<dirname>.mcpb` in the current working directory.
 * @param {boolean} [options.silent] - Suppress logger output.
 * @returns {Promise<boolean>} true on success, false on any failure.
 */
export async function packExtension({ extensionPath, outputPath, silent, }) {
    const resolvedPath = resolve(extensionPath);
    const logger = getLogger({ silent });
    // Check if directory exists
    if (!existsSync(resolvedPath) || !statSync(resolvedPath).isDirectory()) {
        logger.error(`ERROR: Directory not found: ${extensionPath}`);
        return false;
    }
    // Check if manifest exists
    const manifestPath = join(resolvedPath, "manifest.json");
    if (!existsSync(manifestPath)) {
        logger.log(`No manifest.json found in ${extensionPath}`);
        // Offer to run the interactive init wizard before giving up.
        const shouldInit = await confirm({
            message: "Would you like to create a manifest.json file?",
            default: true,
        });
        if (shouldInit) {
            const success = await initExtension(extensionPath);
            if (!success) {
                logger.error("ERROR: Failed to create manifest");
                return false;
            }
        }
        else {
            logger.error("ERROR: Cannot pack extension without manifest.json");
            return false;
        }
    }
    // Validate manifest first
    logger.log("Validating manifest...");
    if (!validateManifest(manifestPath)) {
        logger.error("ERROR: Cannot pack extension with invalid manifest");
        return false;
    }
    // Read and parse manifest
    let manifest;
    try {
        const manifestContent = readFileSync(manifestPath, "utf-8");
        const manifestData = JSON.parse(manifestContent);
        manifest = McpbManifestSchema.parse(manifestData);
    }
    catch (error) {
        logger.error("ERROR: Failed to parse manifest.json");
        if (error instanceof Error) {
            logger.error(` ${error.message}`);
        }
        return false;
    }
    // dxt_version is the legacy (pre-rename) field name for manifest_version.
    const manifestVersion = manifest.manifest_version || manifest.dxt_version;
    if (manifestVersion !== CURRENT_MANIFEST_VERSION) {
        logger.error(`ERROR: Manifest version mismatch. Expected "${CURRENT_MANIFEST_VERSION}", found "${manifestVersion}"`);
        logger.error(` Please update the manifest_version in your manifest.json to "${CURRENT_MANIFEST_VERSION}"`);
        return false;
    }
    // Determine output path
    const extensionName = basename(resolvedPath);
    const finalOutputPath = outputPath
        ? resolve(outputPath)
        : resolve(`${extensionName}.mcpb`);
    // Ensure output directory exists
    const outputDir = join(finalOutputPath, "..");
    mkdirSync(outputDir, { recursive: true });
    try {
        // Read .mcpbignore patterns if present
        const mcpbIgnorePatterns = readMcpbIgnorePatterns(resolvedPath);
        // Get all files in the extension directory
        const { files, ignoredCount } = getAllFilesWithCount(resolvedPath, resolvedPath, {}, mcpbIgnorePatterns);
        // Print package header
        logger.log(`\n📦 ${manifest.name}@${manifest.version}`);
        // Print file list
        logger.log("Archive Contents");
        const fileEntries = Object.entries(files);
        let totalUnpackedSize = 0;
        // Sort files for consistent output
        fileEntries.sort(([a], [b]) => a.localeCompare(b));
        // Group files by directory for deep nesting
        const directoryGroups = new Map();
        const shallowFiles = [];
        for (const [filePath, fileData] of fileEntries) {
            const relPath = relative(resolvedPath, filePath);
            const content = fileData.data;
            // String contents are measured in UTF-8 bytes, buffers by length.
            const size = typeof content === "string"
                ? Buffer.byteLength(content, "utf8")
                : content.length;
            totalUnpackedSize += size;
            // Check if file is deeply nested (3+ levels)
            const parts = relPath.split(sep);
            if (parts.length > 3) {
                // Group by the first 3 directory levels
                const groupKey = parts.slice(0, 3).join("/");
                if (!directoryGroups.has(groupKey)) {
                    directoryGroups.set(groupKey, { files: [], totalSize: 0 });
                }
                const group = directoryGroups.get(groupKey);
                group.files.push(relPath);
                group.totalSize += size;
            }
            else {
                shallowFiles.push({ path: relPath, size });
            }
        }
        // Print shallow files first
        for (const { path, size } of shallowFiles) {
            logger.log(`${formatFileSize(size).padStart(8)} ${path}`);
        }
        // Print grouped directories
        for (const [dir, { files, totalSize }] of directoryGroups) {
            if (files.length === 1) {
                // If only one file in the group, print it normally
                const filePath = files[0];
                const fileSize = totalSize;
                logger.log(`${formatFileSize(fileSize).padStart(8)} ${filePath}`);
            }
            else {
                // Print directory summary
                logger.log(`${formatFileSize(totalSize).padStart(8)} ${dir}/ [and ${files.length} more files]`);
            }
        }
        // Create zip with preserved file permissions
        const zipFiles = {};
        const isUnix = process.platform !== "win32";
        for (const [filePath, fileData] of Object.entries(files)) {
            if (isUnix) {
                // Set external file attributes to preserve Unix permissions
                // The mode needs to be shifted to the upper 16 bits for ZIP format
                zipFiles[filePath] = [
                    fileData.data,
                    { os: 3, attrs: (fileData.mode & 0o777) << 16 },
                ];
            }
            else {
                // On Windows, use default ZIP attributes (no Unix permissions)
                zipFiles[filePath] = fileData.data;
            }
        }
        const zipData = zipSync(zipFiles, {
            level: 9, // Maximum compression
            mtime: new Date(),
        });
        // Write zip file
        writeFileSync(finalOutputPath, zipData);
        // Calculate SHA sum
        const shasum = createHash("sha1").update(zipData).digest("hex");
        // Print archive details
        const sanitizedName = sanitizeNameForFilename(manifest.name);
        const archiveName = `${sanitizedName}-${manifest.version}.mcpb`;
        logger.log("\nArchive Details");
        logger.log(`name: ${manifest.name}`);
        logger.log(`version: ${manifest.version}`);
        logger.log(`filename: ${archiveName}`);
        logger.log(`package size: ${formatFileSize(zipData.length)}`);
        logger.log(`unpacked size: ${formatFileSize(totalUnpackedSize)}`);
        logger.log(`shasum: ${shasum}`);
        logger.log(`total files: ${fileEntries.length}`);
        logger.log(`ignored (.mcpbignore) files: ${ignoredCount}`);
        logger.log(`\nOutput: ${finalOutputPath}`);
        return true;
    }
    catch (error) {
        if (error instanceof Error) {
            logger.error(`ERROR: Archive error: ${error.message}`);
        }
        else {
            logger.error("ERROR: Unknown archive error occurred");
        }
        return false;
    }
}
|
||||
101
extracted-source/node_modules/@anthropic-ai/mcpb/dist/cli/unpack.js
generated
vendored
Normal file
101
extracted-source/node_modules/@anthropic-ai/mcpb/dist/cli/unpack.js
generated
vendored
Normal file
@@ -0,0 +1,101 @@
|
||||
import { unzipSync } from "fflate";
|
||||
import { chmodSync, existsSync, mkdirSync, readFileSync, writeFileSync, } from "fs";
|
||||
import { join, resolve, sep } from "path";
|
||||
import { extractSignatureBlock } from "../node/sign.js";
|
||||
import { getLogger } from "../shared/log.js";
|
||||
/**
 * Extract a .mcpb archive into a directory.
 *
 * Strips any trailing signature block, manually walks the ZIP central
 * directory (on non-Windows hosts) to recover per-file Unix permission
 * bits that fflate's unzipSync does not expose, guards against zip-slip
 * path traversal, writes each entry, and restores permissions.
 *
 * @param {object} options
 * @param {string} options.mcpbPath - Path to the .mcpb file.
 * @param {string} [options.outputDir] - Extraction target; defaults to cwd.
 * @param {boolean} [options.silent] - Suppress logger output.
 * @returns {Promise<boolean>} true on success, false on any failure.
 */
export async function unpackExtension({ mcpbPath, outputDir, silent, }) {
    const logger = getLogger({ silent });
    const resolvedMcpbPath = resolve(mcpbPath);
    if (!existsSync(resolvedMcpbPath)) {
        logger.error(`ERROR: MCPB file not found: ${mcpbPath}`);
        return false;
    }
    const finalOutputDir = outputDir ? resolve(outputDir) : process.cwd();
    if (!existsSync(finalOutputDir)) {
        mkdirSync(finalOutputDir, { recursive: true });
    }
    try {
        const fileContent = readFileSync(resolvedMcpbPath);
        // originalContent is the raw ZIP with any signature block removed.
        const { originalContent } = extractSignatureBlock(fileContent);
        // Parse file attributes from ZIP central directory
        const fileAttributes = new Map();
        const isUnix = process.platform !== "win32";
        if (isUnix) {
            // Parse ZIP central directory to extract file attributes
            const zipBuffer = originalContent;
            // Find end of central directory record
            // 22 is the minimum EOCD record size; scan backwards for its
            // signature (0x06054b50, "PK\x05\x06").
            let eocdOffset = -1;
            for (let i = zipBuffer.length - 22; i >= 0; i--) {
                if (zipBuffer.readUInt32LE(i) === 0x06054b50) {
                    eocdOffset = i;
                    break;
                }
            }
            if (eocdOffset !== -1) {
                // EOCD layout: entry count at +8, central directory offset at +16.
                const centralDirOffset = zipBuffer.readUInt32LE(eocdOffset + 16);
                const centralDirEntries = zipBuffer.readUInt16LE(eocdOffset + 8);
                let offset = centralDirOffset;
                for (let i = 0; i < centralDirEntries; i++) {
                    // 0x02014b50 is the central-directory file-header signature.
                    if (zipBuffer.readUInt32LE(offset) === 0x02014b50) {
                        const externalAttrs = zipBuffer.readUInt32LE(offset + 38);
                        const filenameLength = zipBuffer.readUInt16LE(offset + 28);
                        const filename = zipBuffer.toString("utf8", offset + 46, offset + 46 + filenameLength);
                        // Extract Unix permissions from external attributes (upper 16 bits)
                        const mode = (externalAttrs >> 16) & 0o777;
                        if (mode > 0) {
                            fileAttributes.set(filename, mode);
                        }
                        // Advance past the fixed 46-byte header plus the three
                        // variable-length trailing fields.
                        const extraFieldLength = zipBuffer.readUInt16LE(offset + 30);
                        const commentLength = zipBuffer.readUInt16LE(offset + 32);
                        offset += 46 + filenameLength + extraFieldLength + commentLength;
                    }
                    else {
                        break;
                    }
                }
            }
        }
        const decompressed = unzipSync(originalContent);
        for (const relativePath in decompressed) {
            if (Object.prototype.hasOwnProperty.call(decompressed, relativePath)) {
                const data = decompressed[relativePath];
                const fullPath = join(finalOutputDir, relativePath);
                // Prevent zip slip attacks by validating the resolved path
                const normalizedPath = resolve(fullPath);
                const normalizedOutputDir = resolve(finalOutputDir);
                if (!normalizedPath.startsWith(normalizedOutputDir + sep) &&
                    normalizedPath !== normalizedOutputDir) {
                    throw new Error(`Path traversal attempt detected: ${relativePath}`);
                }
                const dir = join(fullPath, "..");
                if (!existsSync(dir)) {
                    mkdirSync(dir, { recursive: true });
                }
                writeFileSync(fullPath, data);
                // Restore Unix file permissions if available
                if (isUnix && fileAttributes.has(relativePath)) {
                    try {
                        const mode = fileAttributes.get(relativePath);
                        if (mode !== undefined) {
                            chmodSync(fullPath, mode);
                        }
                    }
                    catch (error) {
                        // Silently ignore permission errors
                    }
                }
            }
        }
        logger.log(`Extension unpacked successfully to ${finalOutputDir}`);
        return true;
    }
    catch (error) {
        if (error instanceof Error) {
            logger.error(`ERROR: Failed to unpack extension: ${error.message}`);
        }
        else {
            logger.error("ERROR: An unknown error occurred during unpacking.");
        }
        return false;
    }
}
|
||||
10
extracted-source/node_modules/@anthropic-ai/mcpb/dist/index.js
generated
vendored
Normal file
10
extracted-source/node_modules/@anthropic-ai/mcpb/dist/index.js
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
// Default export includes everything (backward compatibility)
// CLI entry points (init/pack/unpack commands)
export * from "./cli/init.js";
export * from "./cli/pack.js";
export * from "./cli/unpack.js";
// Node-only helpers: filesystem walking, PKCS#7 signing, manifest validation
export * from "./node/files.js";
export * from "./node/sign.js";
export * from "./node/validate.js";
// Shared zod schemas, config variable substitution, and type definitions
export * from "./schemas.js";
export * from "./shared/config.js";
export * from "./types.js";
|
||||
115
extracted-source/node_modules/@anthropic-ai/mcpb/dist/node/files.js
generated
vendored
Normal file
115
extracted-source/node_modules/@anthropic-ai/mcpb/dist/node/files.js
generated
vendored
Normal file
@@ -0,0 +1,115 @@
|
||||
import { existsSync, readdirSync, readFileSync, statSync } from "fs";
|
||||
import ignore from "ignore";
|
||||
import { join, relative, sep } from "path";
|
||||
// Files/patterns to exclude from the package
// Matched with gitignore-style semantics (consumed by the `ignore` package
// in buildIgnoreChecker); order is not significant.
export const EXCLUDE_PATTERNS = [
    // OS metadata
    ".DS_Store",
    "Thumbs.db",
    // VCS and MCPB packaging files
    ".gitignore",
    ".git",
    ".mcpbignore",
    // Logs and environment files
    "*.log",
    ".env*",
    // Package-manager config, caches, and non-runtime node_modules content
    ".npm",
    ".npmrc",
    ".yarnrc",
    ".yarn",
    ".eslintrc",
    ".editorconfig",
    ".prettierrc",
    ".prettierignore",
    ".eslintignore",
    ".nycrc",
    ".babelrc",
    ".pnp.*",
    "node_modules/.cache",
    "node_modules/.bin",
    // Sourcemaps and local env overrides
    "*.map",
    ".env.local",
    ".env.*.local",
    // Package-manager debug logs and lockfiles
    "npm-debug.log*",
    "yarn-debug.log*",
    "yarn-error.log*",
    "package-lock.json",
    "yarn.lock",
    // Previously built bundles and TypeScript build artifacts
    "*.mcpb",
    "*.d.ts",
    "*.tsbuildinfo",
    "tsconfig.json",
];
|
||||
/**
 * Read and parse .mcpbignore file patterns
 *
 * @param baseDir Directory expected to contain the `.mcpbignore` file
 * @returns Trimmed, non-empty, non-comment pattern lines; an empty array
 *   when the file is absent or unreadable (a warning is printed on error)
 */
export function readMcpbIgnorePatterns(baseDir) {
    const ignoreFilePath = join(baseDir, ".mcpbignore");
    if (!existsSync(ignoreFilePath)) {
        return [];
    }
    try {
        // Handle both Unix and Windows line endings.
        const rawLines = readFileSync(ignoreFilePath, "utf-8").split(/\r?\n/);
        const patterns = [];
        for (const rawLine of rawLines) {
            const trimmed = rawLine.trim();
            // Blank lines and '#' comments carry no patterns.
            if (trimmed.length > 0 && !trimmed.startsWith("#")) {
                patterns.push(trimmed);
            }
        }
        return patterns;
    }
    catch (error) {
        console.warn(`Warning: Could not read .mcpbignore file: ${error instanceof Error ? error.message : "Unknown error"}`);
        return [];
    }
}
|
||||
// Builds a gitignore-style matcher over the built-in EXCLUDE_PATTERNS plus
// any caller-supplied patterns (e.g. from a .mcpbignore file).
function buildIgnoreChecker(additionalPatterns) {
    const checker = ignore();
    checker.add(EXCLUDE_PATTERNS);
    checker.add(additionalPatterns);
    return checker;
}
/**
 * Used for testing, calls the same methods as the other ignore checks
 */
export function shouldExclude(filePath, additionalPatterns = []) {
    const checker = buildIgnoreChecker(additionalPatterns);
    return checker.ignores(filePath);
}
|
||||
/**
 * Recursively collects file contents under `dirPath`, skipping anything that
 * matches the ignore patterns.
 *
 * @param dirPath Directory to walk
 * @param baseDir Root used to compute archive-relative paths (defaults to dirPath)
 * @param fileList Accumulator mapping zip paths (forward-slash separated) to Buffers
 * @param additionalPatterns Extra ignore patterns on top of EXCLUDE_PATTERNS
 * @returns The same `fileList` object, populated
 */
export function getAllFiles(dirPath, baseDir = dirPath, fileList = {}, additionalPatterns = []) {
    const checker = buildIgnoreChecker(additionalPatterns);
    for (const entry of readdirSync(dirPath)) {
        const absolutePath = join(dirPath, entry);
        const relativePath = relative(baseDir, absolutePath);
        if (checker.ignores(relativePath)) {
            continue;
        }
        const info = statSync(absolutePath);
        if (info.isDirectory()) {
            getAllFiles(absolutePath, baseDir, fileList, additionalPatterns);
        }
        else {
            // Zip entries always use forward slashes, regardless of host OS.
            const zipPath = relativePath.split(sep).join("/");
            fileList[zipPath] = readFileSync(absolutePath);
        }
    }
    return fileList;
}
|
||||
/**
 * Variant of getAllFiles that also records each file's st_mode (used by the
 * unpack path to restore Unix permissions) and counts ignored entries.
 *
 * Note: an ignored directory counts as ONE ignored entry — its contents are
 * never visited or counted.
 *
 * @param dirPath Directory to walk
 * @param baseDir Root used to compute archive-relative paths (defaults to dirPath)
 * @param fileList Accumulator mapping zip paths to { data, mode }
 * @param additionalPatterns Extra ignore patterns on top of EXCLUDE_PATTERNS
 * @param ignoredCount Running total, threaded through the recursion
 * @returns { files, ignoredCount } — `files` is the same object as `fileList`
 */
export function getAllFilesWithCount(dirPath, baseDir = dirPath, fileList = {}, additionalPatterns = [], ignoredCount = 0) {
    const files = readdirSync(dirPath);
    const ignoreChecker = buildIgnoreChecker(additionalPatterns);
    for (const file of files) {
        const filePath = join(dirPath, file);
        const relativePath = relative(baseDir, filePath);
        if (ignoreChecker.ignores(relativePath)) {
            ignoredCount++;
            continue;
        }
        const stat = statSync(filePath);
        if (stat.isDirectory()) {
            // Recursion mutates fileList in place; only the count needs to be
            // carried back out of the call.
            const result = getAllFilesWithCount(filePath, baseDir, fileList, additionalPatterns, ignoredCount);
            ignoredCount = result.ignoredCount;
        }
        else {
            // Use forward slashes in zip file paths
            const zipPath = relativePath.split(sep).join("/");
            fileList[zipPath] = {
                data: readFileSync(filePath),
                mode: stat.mode,
            };
        }
    }
    return { files: fileList, ignoredCount };
}
|
||||
333
extracted-source/node_modules/@anthropic-ai/mcpb/dist/node/sign.js
generated
vendored
Normal file
333
extracted-source/node_modules/@anthropic-ai/mcpb/dist/node/sign.js
generated
vendored
Normal file
@@ -0,0 +1,333 @@
|
||||
import { execFile } from "child_process";
|
||||
import { readFileSync, writeFileSync } from "fs";
|
||||
import { mkdtemp, rm, writeFile } from "fs/promises";
|
||||
import forge from "node-forge";
|
||||
import { tmpdir } from "os";
|
||||
import { join } from "path";
|
||||
import { promisify } from "util";
|
||||
// Signature block markers. The block appended to a signed .mcpb file is:
//   SIGNATURE_HEADER | uint32-LE signature length | PKCS#7 DER | SIGNATURE_FOOTER
// (see createSignatureBlock / extractSignatureBlock below).
const SIGNATURE_HEADER = "MCPB_SIG_V1";
const SIGNATURE_FOOTER = "MCPB_SIG_END";
// Promisified child_process.execFile, used to invoke the OS certificate tools.
const execFileAsync = promisify(execFile);
|
||||
/**
 * Signs a MCPB file with the given certificate and private key using PKCS#7
 *
 * The detached, DER-encoded PKCS#7 signature is wrapped in a marker-delimited
 * block (see createSignatureBlock) and appended to the archive; the file at
 * `mcpbPath` is rewritten in place.
 *
 * @param mcpbPath Path to the MCPB file to sign
 * @param certPath Path to the certificate file (PEM format)
 * @param keyPath Path to the private key file (PEM format)
 * @param intermediates Optional array of intermediate certificate paths
 */
export function signMcpbFile(mcpbPath, certPath, keyPath, intermediates) {
    // Read the original MCPB file
    const mcpbContent = readFileSync(mcpbPath);
    // Read certificate and key
    const certificatePem = readFileSync(certPath, "utf-8");
    const privateKeyPem = readFileSync(keyPath, "utf-8");
    // Read intermediate certificates if provided
    const intermediatePems = intermediates?.map((path) => readFileSync(path, "utf-8"));
    // Create PKCS#7 signed data
    const p7 = forge.pkcs7.createSignedData();
    p7.content = forge.util.createBuffer(mcpbContent);
    // Parse and add certificates
    const signingCert = forge.pki.certificateFromPem(certificatePem);
    const privateKey = forge.pki.privateKeyFromPem(privateKeyPem);
    p7.addCertificate(signingCert);
    // Add intermediate certificates (embedded so verifiers can rebuild the chain)
    if (intermediatePems) {
        for (const pem of intermediatePems) {
            p7.addCertificate(forge.pki.certificateFromPem(pem));
        }
    }
    // Add signer
    p7.addSigner({
        key: privateKey,
        certificate: signingCert,
        digestAlgorithm: forge.pki.oids.sha256,
        authenticatedAttributes: [
            {
                type: forge.pki.oids.contentType,
                value: forge.pki.oids.data,
            },
            {
                type: forge.pki.oids.messageDigest,
                // Value will be auto-populated
            },
            {
                type: forge.pki.oids.signingTime,
                // Value will be auto-populated with current time
            },
        ],
    });
    // Sign with detached signature: the archive bytes are NOT embedded in the
    // PKCS#7 structure; verification recomputes the digest from the file.
    p7.sign({ detached: true });
    // Convert to DER format
    const asn1 = forge.asn1.toDer(p7.toAsn1());
    const pkcs7Signature = Buffer.from(asn1.getBytes(), "binary");
    // Create signature block with PKCS#7 data
    const signatureBlock = createSignatureBlock(pkcs7Signature);
    // Append signature block to MCPB file
    const signedContent = Buffer.concat([mcpbContent, signatureBlock]);
    writeFileSync(mcpbPath, signedContent);
}
|
||||
/**
 * Verifies a signed MCPB file using OS certificate store
 *
 * NOTE(review): every failure mode below — missing signature, malformed
 * PKCS#7, digest mismatch, untrusted chain — is reported identically as
 * `{ status: "unsigned" }`; callers cannot distinguish "not signed" from
 * "invalid signature". Confirm this is intended.
 *
 * @param mcpbPath Path to the signed MCPB file
 * @returns Signature information including verification status
 */
export async function verifyMcpbFile(mcpbPath) {
    try {
        const fileContent = readFileSync(mcpbPath);
        // Find and extract signature block
        const { originalContent, pkcs7Signature } = extractSignatureBlock(fileContent);
        if (!pkcs7Signature) {
            return { status: "unsigned" };
        }
        // Parse PKCS#7 signature
        const asn1 = forge.asn1.fromDer(pkcs7Signature.toString("binary"));
        const p7Message = forge.pkcs7.messageFromAsn1(asn1);
        // Verify it's signed data and cast to correct type
        if (!("type" in p7Message) ||
            p7Message.type !== forge.pki.oids.signedData) {
            return { status: "unsigned" };
        }
        // Now we know it's PkcsSignedData. The types are incorrect, so we'll
        // fix them there
        const p7 = p7Message;
        // Extract certificates from PKCS#7
        const certificates = p7.certificates || [];
        if (certificates.length === 0) {
            return { status: "unsigned" };
        }
        // Get the signing certificate (first one)
        const signingCert = certificates[0];
        // Verify PKCS#7 signature
        const contentBuf = forge.util.createBuffer(originalContent);
        try {
            p7.verify({ authenticatedAttributes: true });
            // Also verify the content matches: since the signature is detached,
            // recompute SHA-256 of the archive bytes and compare it against the
            // signed messageDigest attribute (binary-string comparison).
            const signerInfos = p7.signerInfos;
            const signerInfo = signerInfos?.[0];
            if (signerInfo) {
                const md = forge.md.sha256.create();
                md.update(contentBuf.getBytes());
                const digest = md.digest().getBytes();
                // Find the message digest attribute
                let messageDigest = null;
                for (const attr of signerInfo.authenticatedAttributes) {
                    if (attr.type === forge.pki.oids.messageDigest) {
                        messageDigest = attr.value;
                        break;
                    }
                }
                if (!messageDigest || messageDigest !== digest) {
                    return { status: "unsigned" };
                }
            }
        }
        catch (error) {
            // Cryptographic verification failed — report as unsigned.
            return { status: "unsigned" };
        }
        // Convert forge certificate to PEM for OS verification
        const certPem = forge.pki.certificateToPem(signingCert);
        const intermediatePems = certificates
            .slice(1)
            .map((cert) => Buffer.from(forge.pki.certificateToPem(cert)));
        // Verify certificate chain against OS trust store
        const chainValid = await verifyCertificateChain(Buffer.from(certPem), intermediatePems);
        if (!chainValid) {
            // Signature is valid but certificate is not trusted
            return { status: "unsigned" };
        }
        // Extract certificate info; a matching issuer/subject CN is treated as
        // self-signed.
        const isSelfSigned = signingCert.issuer.getField("CN")?.value ===
            signingCert.subject.getField("CN")?.value;
        return {
            status: isSelfSigned ? "self-signed" : "signed",
            publisher: signingCert.subject.getField("CN")?.value || "Unknown",
            issuer: signingCert.issuer.getField("CN")?.value || "Unknown",
            valid_from: signingCert.validity.notBefore.toISOString(),
            valid_to: signingCert.validity.notAfter.toISOString(),
            // SHA-256 fingerprint of the DER-encoded certificate, hex-encoded.
            fingerprint: forge.md.sha256
                .create()
                .update(forge.asn1.toDer(forge.pki.certificateToAsn1(signingCert)).getBytes())
                .digest()
                .toHex(),
        };
    }
    catch (error) {
        throw new Error(`Failed to verify MCPB file: ${error}`);
    }
}
|
||||
/**
 * Creates a signature block buffer with PKCS#7 signature
 *
 * Layout: SIGNATURE_HEADER | uint32-LE signature length | signature bytes |
 * SIGNATURE_FOOTER. This is the inverse of extractSignatureBlock.
 */
function createSignatureBlock(pkcs7Signature) {
    const header = Buffer.from(SIGNATURE_HEADER, "utf-8");
    const footer = Buffer.from(SIGNATURE_FOOTER, "utf-8");
    // 4-byte little-endian length prefix so the extractor knows how many
    // signature bytes follow the header.
    const lengthPrefix = Buffer.alloc(4);
    lengthPrefix.writeUInt32LE(pkcs7Signature.length, 0);
    return Buffer.concat([header, lengthPrefix, pkcs7Signature, footer]);
}
|
||||
/**
 * Extracts the signature block from a signed MCPB file
 *
 * Returns `{ originalContent }` (the unmodified input) when no well-formed
 * signature block is present, or `{ originalContent, pkcs7Signature }` when
 * one is found: `originalContent` is everything before the header marker and
 * `pkcs7Signature` is the raw DER-encoded PKCS#7 blob.
 */
export function extractSignatureBlock(fileContent) {
    // Look for signature footer at the end
    const footerBytes = Buffer.from(SIGNATURE_FOOTER, "utf-8");
    const footerIndex = fileContent.lastIndexOf(footerBytes);
    if (footerIndex === -1) {
        return { originalContent: fileContent };
    }
    // Look for signature header before footer
    const headerBytes = Buffer.from(SIGNATURE_HEADER, "utf-8");
    let headerIndex = -1;
    // Search backwards from footer (byte-wise comparison; the header may sit
    // anywhere before the footer)
    for (let i = footerIndex - 1; i >= 0; i--) {
        if (fileContent.slice(i, i + headerBytes.length).equals(headerBytes)) {
            headerIndex = i;
            break;
        }
    }
    if (headerIndex === -1) {
        return { originalContent: fileContent };
    }
    // Extract original content (everything before signature block)
    const originalContent = fileContent.slice(0, headerIndex);
    // Parse signature block
    let offset = headerIndex + headerBytes.length;
    try {
        // Read PKCS#7 signature length (uint32 little-endian, as written by
        // createSignatureBlock)
        const sigLength = fileContent.readUInt32LE(offset);
        offset += 4;
        // Read PKCS#7 signature
        const pkcs7Signature = fileContent.slice(offset, offset + sigLength);
        return {
            originalContent,
            pkcs7Signature,
        };
    }
    catch {
        // Truncated/garbled block (e.g. readUInt32LE past end of buffer):
        // treat the whole file as unsigned content.
        return { originalContent: fileContent };
    }
}
|
||||
/**
 * Verifies certificate chain against OS trust store
 *
 * Writes leaf + intermediates to a temporary PEM bundle, then defers to a
 * platform tool — `security verify-cert` (macOS), a PowerShell X509Chain
 * check (Windows), or `openssl verify` (Linux) — each constrained to the
 * code-signing purpose/policy. Any failure, including tool invocation
 * errors, yields `false`. The temp dir is always removed.
 */
export async function verifyCertificateChain(certificate, intermediates) {
    let tempDir = null;
    try {
        tempDir = await mkdtemp(join(tmpdir(), "mcpb-verify-"));
        const certChainPath = join(tempDir, "chain.pem");
        // Buffers are coerced to strings by join(), producing one PEM bundle.
        const certChain = [certificate, ...(intermediates || [])].join("\n");
        await writeFile(certChainPath, certChain);
        // Platform-specific verification
        if (process.platform === "darwin") {
            try {
                await execFileAsync("security", [
                    "verify-cert",
                    "-c",
                    certChainPath,
                    "-p",
                    "codeSign",
                ]);
                return true;
            }
            catch (error) {
                return false;
            }
        }
        else if (process.platform === "win32") {
            // certChainPath is interpolated into the script; it is derived from
            // tmpdir() + a fixed prefix, not from user input.
            const psCommand = `
        $ErrorActionPreference = 'Stop'
        $certCollection = New-Object System.Security.Cryptography.X509Certificates.X509Certificate2Collection
        $certCollection.Import('${certChainPath}')

        if ($certCollection.Count -eq 0) {
          Write-Error 'No certificates found'
          exit 1
        }

        $leafCert = $certCollection[0]
        $chain = New-Object System.Security.Cryptography.X509Certificates.X509Chain

        # Enable revocation checking
        $chain.ChainPolicy.RevocationMode = 'Online'
        $chain.ChainPolicy.RevocationFlag = 'EntireChain'
        $chain.ChainPolicy.UrlRetrievalTimeout = New-TimeSpan -Seconds 30

        # Add code signing application policy
        $codeSignOid = New-Object System.Security.Cryptography.Oid '1.3.6.1.5.5.7.3.3'
        $chain.ChainPolicy.ApplicationPolicy.Add($codeSignOid)

        # Add intermediate certificates to extra store
        for ($i = 1; $i -lt $certCollection.Count; $i++) {
          [void]$chain.ChainPolicy.ExtraStore.Add($certCollection[$i])
        }

        # Build and validate chain
        $result = $chain.Build($leafCert)

        if ($result) {
          'Valid'
        } else {
          $chain.ChainStatus | ForEach-Object {
            Write-Error "$($_.Status): $($_.StatusInformation)"
          }
          exit 1
        }
      `.trim();
            const { stdout } = await execFileAsync("powershell.exe", [
                "-NoProfile",
                "-NonInteractive",
                "-Command",
                psCommand,
            ]);
            return stdout.includes("Valid");
        }
        else {
            // Linux: Use openssl
            try {
                await execFileAsync("openssl", [
                    "verify",
                    "-purpose",
                    "codesigning",
                    "-CApath",
                    "/etc/ssl/certs",
                    certChainPath,
                ]);
                return true;
            }
            catch (error) {
                return false;
            }
        }
    }
    catch (error) {
        // Temp-dir setup or unexpected tool failure — treat as untrusted.
        return false;
    }
    finally {
        if (tempDir) {
            try {
                await rm(tempDir, { recursive: true, force: true });
            }
            catch {
                // Ignore cleanup errors
            }
        }
    }
}
|
||||
/**
 * Removes signature from a MCPB file
 *
 * Strips the appended signature block (if any) and rewrites the file in
 * place. A file with no signature block is rewritten with identical content.
 */
export function unsignMcpbFile(mcpbPath) {
    const signedContent = readFileSync(mcpbPath);
    const { originalContent } = extractSignatureBlock(signedContent);
    writeFileSync(mcpbPath, originalContent);
}
|
||||
124
extracted-source/node_modules/@anthropic-ai/mcpb/dist/node/validate.js
generated
vendored
Normal file
124
extracted-source/node_modules/@anthropic-ai/mcpb/dist/node/validate.js
generated
vendored
Normal file
@@ -0,0 +1,124 @@
|
||||
import { existsSync, readFileSync, statSync } from "fs";
|
||||
import * as fs from "fs/promises";
|
||||
import { DestroyerOfModules } from "galactus";
|
||||
import * as os from "os";
|
||||
import { join, resolve } from "path";
|
||||
import prettyBytes from "pretty-bytes";
|
||||
import { unpackExtension } from "../cli/unpack.js";
|
||||
import { McpbManifestSchema } from "../schemas.js";
|
||||
import { McpbManifestSchema as LooseMcpbManifestSchema } from "../schemas-loose.js";
|
||||
/**
 * Validates a manifest.json (or a directory containing one) against the
 * strict MCPB manifest schema, logging results to the console.
 *
 * @param inputPath Path to a manifest.json file or to a directory
 * @returns true when the manifest parses and passes schema validation
 */
export function validateManifest(inputPath) {
    try {
        const resolvedPath = resolve(inputPath);
        // When given a directory, validate the manifest.json inside it.
        const manifestPath = existsSync(resolvedPath) && statSync(resolvedPath).isDirectory()
            ? join(resolvedPath, "manifest.json")
            : resolvedPath;
        const manifestData = JSON.parse(readFileSync(manifestPath, "utf-8"));
        const result = McpbManifestSchema.safeParse(manifestData);
        if (result.success) {
            console.log("Manifest schema validation passes!");
            return true;
        }
        console.log("ERROR: Manifest validation failed:\n");
        for (const issue of result.error.issues) {
            const path = issue.path.join(".");
            console.log(`  - ${path ? `${path}: ` : ""}${issue.message}`);
        }
        return false;
    }
    catch (error) {
        if (!(error instanceof Error)) {
            console.error("ERROR: Unknown error occurred");
            return false;
        }
        if (error.message.includes("ENOENT")) {
            console.error(`ERROR: File not found: ${inputPath}`);
            if (existsSync(resolve(inputPath)) &&
                statSync(resolve(inputPath)).isDirectory()) {
                console.error(` (No manifest.json found in directory)`);
            }
        }
        else if (error.message.includes("JSON")) {
            console.error(`ERROR: Invalid JSON in manifest file: ${error.message}`);
        }
        else {
            console.error(`ERROR: Error reading manifest: ${error.message}`);
        }
        return false;
    }
}
|
||||
/**
 * Repacks an existing .mcpb archive after normalizing its manifest (via the
 * loose schema) and pruning development-only node_modules. Works in a temp
 * dir and rewrites `inputPath` in place; the temp dir is always removed.
 *
 * @param inputPath Path to the .mcpb file to clean (modified in place)
 * @throws when the manifest cannot be salvaged even by the loose schema
 */
export async function cleanMcpb(inputPath) {
    const tmpDir = await fs.mkdtemp(resolve(os.tmpdir(), "mcpb-clean-"));
    const mcpbPath = resolve(tmpDir, "in.mcpb");
    const unpackPath = resolve(tmpDir, "out");
    console.log(" -- Cleaning MCPB...");
    try {
        // Work on a copy so the input stays intact until the final repack.
        await fs.copyFile(inputPath, mcpbPath);
        console.log(" -- Unpacking MCPB...");
        await unpackExtension({ mcpbPath, silent: true, outputDir: unpackPath });
        const manifestPath = resolve(unpackPath, "manifest.json");
        const originalManifest = await fs.readFile(manifestPath, "utf-8");
        const manifestData = JSON.parse(originalManifest);
        // The loose schema tolerates manifests the strict one rejects; its
        // parsed output is written back as the normalized manifest.
        const result = LooseMcpbManifestSchema.safeParse(manifestData);
        if (!result.success) {
            throw new Error(`Unrecoverable manifest issues, please run "mcpb validate"`);
        }
        await fs.writeFile(manifestPath, JSON.stringify(result.data, null, 2));
        // Re-read to compare: report whether normalization actually changed it.
        if (originalManifest.trim() !==
            (await fs.readFile(manifestPath, "utf8")).trim()) {
            console.log(" -- Update manifest to be valid per MCPB schema");
        }
        else {
            console.log(" -- Manifest already valid per MCPB schema");
        }
        const nodeModulesPath = resolve(unpackPath, "node_modules");
        if (existsSync(nodeModulesPath)) {
            console.log(" -- node_modules found, deleting development dependencies");
            const destroyer = new DestroyerOfModules({
                rootDirectory: unpackPath,
            });
            try {
                await destroyer.destroy();
            }
            catch (error) {
                // If modules have already been deleted in a previous clean, the walker
                // will fail when it can't find required dependencies. This is expected
                // and safe to ignore.
                if (error instanceof Error &&
                    error.message.includes("Failed to locate module")) {
                    console.log(" -- Some modules already removed, skipping remaining cleanup");
                }
                else {
                    throw error;
                }
            }
            console.log(" -- Removed development dependencies from node_modules");
        }
        else {
            console.log(" -- No node_modules, not pruning");
        }
        const before = await fs.stat(inputPath);
        // NOTE(review): dynamic import — presumably to avoid a static
        // pack <-> validate import cycle; confirm before converting to static.
        const { packExtension } = await import("../cli/pack.js");
        await packExtension({
            extensionPath: unpackPath,
            outputPath: inputPath,
            silent: true,
        });
        const after = await fs.stat(inputPath);
        console.log("\nClean Complete:");
        console.log("Before:", prettyBytes(before.size));
        console.log("After:", prettyBytes(after.size));
    }
    finally {
        await fs.rm(tmpDir, {
            recursive: true,
            force: true,
        });
    }
}
|
||||
105
extracted-source/node_modules/@anthropic-ai/mcpb/dist/schemas-loose.js
generated
vendored
Normal file
105
extracted-source/node_modules/@anthropic-ai/mcpb/dist/schemas-loose.js
generated
vendored
Normal file
@@ -0,0 +1,105 @@
|
||||
import * as z from "zod";
|
||||
// Loose (non-strict) variants of the MCPB schemas: built with z.object(),
// which strips unknown keys instead of rejecting them. cleanMcpb rewrites
// manifests from this schema's parsed output, so every field the strict
// schema (schemas.js) knows about MUST also appear here, or it will be
// silently dropped during cleaning.
export const McpServerConfigSchema = z.object({
    command: z.string(),
    args: z.array(z.string()).optional(),
    env: z.record(z.string(), z.string()).optional(),
});
export const McpbManifestAuthorSchema = z.object({
    name: z.string(),
    email: z.string().email().optional(),
    url: z.string().url().optional(),
});
export const McpbManifestRepositorySchema = z.object({
    type: z.string(),
    url: z.string().url(),
});
// Per-platform partial overrides of the base server config.
export const McpbManifestPlatformOverrideSchema = McpServerConfigSchema.partial();
export const McpbManifestMcpConfigSchema = McpServerConfigSchema.extend({
    platform_overrides: z
        .record(z.string(), McpbManifestPlatformOverrideSchema)
        .optional(),
});
export const McpbManifestServerSchema = z.object({
    type: z.enum(["python", "node", "binary"]),
    entry_point: z.string(),
    mcp_config: McpbManifestMcpConfigSchema,
});
// passthrough(): unknown compatibility keys are preserved, not stripped.
export const McpbManifestCompatibilitySchema = z
    .object({
    claude_desktop: z.string().optional(),
    platforms: z.array(z.enum(["darwin", "win32", "linux"])).optional(),
    runtimes: z
        .object({
        python: z.string().optional(),
        node: z.string().optional(),
    })
        .optional(),
})
    .passthrough();
export const McpbManifestToolSchema = z.object({
    name: z.string(),
    description: z.string().optional(),
});
export const McpbManifestPromptSchema = z.object({
    name: z.string(),
    description: z.string().optional(),
    arguments: z.array(z.string()).optional(),
    text: z.string(),
});
export const McpbUserConfigurationOptionSchema = z.object({
    type: z.enum(["string", "number", "boolean", "directory", "file"]),
    title: z.string(),
    description: z.string(),
    required: z.boolean().optional(),
    default: z
        .union([z.string(), z.number(), z.boolean(), z.array(z.string())])
        .optional(),
    multiple: z.boolean().optional(),
    sensitive: z.boolean().optional(),
    min: z.number().optional(),
    max: z.number().optional(),
});
export const McpbUserConfigValuesSchema = z.record(z.string(), z.union([z.string(), z.number(), z.boolean(), z.array(z.string())]));
export const McpbManifestSchema = z
    .object({
    $schema: z.string().optional(),
    dxt_version: z
        .string()
        .optional()
        .describe("@deprecated Use manifest_version instead"),
    manifest_version: z.string().optional(),
    name: z.string(),
    display_name: z.string().optional(),
    version: z.string(),
    description: z.string(),
    long_description: z.string().optional(),
    author: McpbManifestAuthorSchema,
    repository: McpbManifestRepositorySchema.optional(),
    homepage: z.string().url().optional(),
    documentation: z.string().url().optional(),
    support: z.string().url().optional(),
    icon: z.string().optional(),
    screenshots: z.array(z.string()).optional(),
    server: McpbManifestServerSchema,
    tools: z.array(McpbManifestToolSchema).optional(),
    tools_generated: z.boolean().optional(),
    prompts: z.array(McpbManifestPromptSchema).optional(),
    prompts_generated: z.boolean().optional(),
    keywords: z.array(z.string()).optional(),
    license: z.string().optional(),
    // Fix: present in the strict schema but previously missing here; without
    // it, z.object()'s unknown-key stripping caused cleanMcpb to silently
    // delete privacy_policies from cleaned manifests.
    privacy_policies: z.array(z.string()).optional(),
    compatibility: McpbManifestCompatibilitySchema.optional(),
    user_config: z
        .record(z.string(), McpbUserConfigurationOptionSchema)
        .optional(),
})
    .refine((data) => !!(data.dxt_version || data.manifest_version), {
    message: "Either 'dxt_version' (deprecated) or 'manifest_version' must be provided",
});
export const McpbSignatureInfoSchema = z.object({
    status: z.enum(["signed", "unsigned", "self-signed"]),
    publisher: z.string().optional(),
    issuer: z.string().optional(),
    valid_from: z.string().optional(),
    valid_to: z.string().optional(),
    fingerprint: z.string().optional(),
});
|
||||
107
extracted-source/node_modules/@anthropic-ai/mcpb/dist/schemas.js
generated
vendored
Normal file
107
extracted-source/node_modules/@anthropic-ai/mcpb/dist/schemas.js
generated
vendored
Normal file
@@ -0,0 +1,107 @@
|
||||
import * as z from "zod";
|
||||
// Manifest format version emitted for newly created manifests.
export const CURRENT_MANIFEST_VERSION = "0.2";
// Strict MCPB schemas: built with z.strictObject(), so unknown keys are
// rejected (except where .passthrough() is applied).
export const McpServerConfigSchema = z.strictObject({
    command: z.string(),
    args: z.array(z.string()).optional(),
    env: z.record(z.string(), z.string()).optional(),
});
export const McpbManifestAuthorSchema = z.strictObject({
    name: z.string(),
    email: z.string().email().optional(),
    url: z.string().url().optional(),
});
export const McpbManifestRepositorySchema = z.strictObject({
    type: z.string(),
    url: z.string().url(),
});
// Per-platform partial overrides of the base server config.
export const McpbManifestPlatformOverrideSchema = McpServerConfigSchema.partial();
export const McpbManifestMcpConfigSchema = McpServerConfigSchema.extend({
    platform_overrides: z
        .record(z.string(), McpbManifestPlatformOverrideSchema)
        .optional(),
});
export const McpbManifestServerSchema = z.strictObject({
    type: z.enum(["python", "node", "binary"]),
    entry_point: z.string(),
    mcp_config: McpbManifestMcpConfigSchema,
});
// passthrough(): unknown compatibility keys are preserved, not rejected.
export const McpbManifestCompatibilitySchema = z
    .strictObject({
    claude_desktop: z.string().optional(),
    platforms: z.array(z.enum(["darwin", "win32", "linux"])).optional(),
    runtimes: z
        .strictObject({
        python: z.string().optional(),
        node: z.string().optional(),
    })
        .optional(),
})
    .passthrough();
export const McpbManifestToolSchema = z.strictObject({
    name: z.string(),
    description: z.string().optional(),
});
export const McpbManifestPromptSchema = z.strictObject({
    name: z.string(),
    description: z.string().optional(),
    arguments: z.array(z.string()).optional(),
    text: z.string(),
});
// One user-configurable option surfaced by the extension host.
export const McpbUserConfigurationOptionSchema = z.strictObject({
    type: z.enum(["string", "number", "boolean", "directory", "file"]),
    title: z.string(),
    description: z.string(),
    required: z.boolean().optional(),
    default: z
        .union([z.string(), z.number(), z.boolean(), z.array(z.string())])
        .optional(),
    multiple: z.boolean().optional(),
    sensitive: z.boolean().optional(),
    min: z.number().optional(),
    max: z.number().optional(),
});
export const McpbUserConfigValuesSchema = z.record(z.string(), z.union([z.string(), z.number(), z.boolean(), z.array(z.string())]));
// Top-level manifest schema. Either dxt_version (deprecated) or
// manifest_version must be present (enforced by the refine below).
export const McpbManifestSchema = z
    .strictObject({
    $schema: z.string().optional(),
    dxt_version: z
        .string()
        .optional()
        .describe("@deprecated Use manifest_version instead"),
    manifest_version: z.string().optional(),
    name: z.string(),
    display_name: z.string().optional(),
    version: z.string(),
    description: z.string(),
    long_description: z.string().optional(),
    author: McpbManifestAuthorSchema,
    repository: McpbManifestRepositorySchema.optional(),
    homepage: z.string().url().optional(),
    documentation: z.string().url().optional(),
    support: z.string().url().optional(),
    icon: z.string().optional(),
    screenshots: z.array(z.string()).optional(),
    server: McpbManifestServerSchema,
    tools: z.array(McpbManifestToolSchema).optional(),
    tools_generated: z.boolean().optional(),
    prompts: z.array(McpbManifestPromptSchema).optional(),
    prompts_generated: z.boolean().optional(),
    keywords: z.array(z.string()).optional(),
    license: z.string().optional(),
    privacy_policies: z.array(z.string()).optional(),
    compatibility: McpbManifestCompatibilitySchema.optional(),
    user_config: z
        .record(z.string(), McpbUserConfigurationOptionSchema)
        .optional(),
})
    .refine((data) => !!(data.dxt_version || data.manifest_version), {
    message: "Either 'dxt_version' (deprecated) or 'manifest_version' must be provided",
});
// Shape of the result returned by verifyMcpbFile.
export const McpbSignatureInfoSchema = z.strictObject({
    status: z.enum(["signed", "unsigned", "self-signed"]),
    publisher: z.string().optional(),
    issuer: z.string().optional(),
    valid_from: z.string().optional(),
    valid_to: z.string().optional(),
    fingerprint: z.string().optional(),
});
|
||||
157
extracted-source/node_modules/@anthropic-ai/mcpb/dist/shared/config.js
generated
vendored
Normal file
157
extracted-source/node_modules/@anthropic-ai/mcpb/dist/shared/config.js
generated
vendored
Normal file
@@ -0,0 +1,157 @@
|
||||
/**
|
||||
* This file contains utility functions for handling MCPB configuration,
|
||||
* including variable replacement and MCP server configuration generation.
|
||||
*/
|
||||
/**
 * Recursively replaces variables in any value. Handles strings, arrays, and objects.
 *
 * Substitution is literal: the `${key}` token is matched as plain text (keys
 * containing regex metacharacters such as `.` are safe) and the replacement
 * is inserted verbatim — `$&`-style patterns in replacement values are NOT
 * interpreted, fixing a bug in the previous regex-based implementation.
 *
 * @param value The value to process
 * @param variables Object containing variable replacements
 * @returns The processed value with all variables replaced
 */
export function replaceVariables(value, variables) {
    if (typeof value === "string") {
        let result = value;
        // Replace all variables in the string
        for (const [key, replacement] of Object.entries(variables)) {
            const token = "${" + key + "}";
            // Check if this token actually exists in the string
            if (!result.includes(token)) {
                continue;
            }
            if (Array.isArray(replacement)) {
                console.warn(`Cannot replace ${key} with array value in string context: "${value}"`, { key, replacement });
            }
            else {
                // split/join performs literal replacement of every occurrence,
                // with no regex or "$&" replacement-pattern semantics.
                result = result.split(token).join(replacement);
            }
        }
        return result;
    }
    else if (Array.isArray(value)) {
        // For arrays, we need to handle special case of array expansion
        const result = [];
        for (const item of value) {
            if (typeof item === "string" &&
                item.match(/^\$\{user_config\.[^}]+\}$/)) {
                // This is a user config variable that might expand to multiple values
                const varName = item.match(/^\$\{([^}]+)\}$/)?.[1];
                if (varName && variables[varName]) {
                    const replacement = variables[varName];
                    if (Array.isArray(replacement)) {
                        // Expand array inline
                        result.push(...replacement);
                    }
                    else {
                        result.push(replacement);
                    }
                }
                else {
                    // Variable not found, keep original
                    result.push(item);
                }
            }
            else {
                // Recursively process non-variable items
                result.push(replaceVariables(item, variables));
            }
        }
        return result;
    }
    else if (value && typeof value === "object") {
        const result = {};
        for (const [key, val] of Object.entries(value)) {
            result[key] = replaceVariables(val, variables);
        }
        return result;
    }
    // Numbers, booleans, null, undefined: returned unchanged.
    return value;
}
|
||||
/**
 * Build the concrete MCP server configuration for an extension manifest.
 *
 * Applies platform-specific overrides, validates that all required user
 * configuration is present, then substitutes `${...}` variables (extension
 * path, system dirs, path separator, and user_config values).
 *
 * @param options.manifest      The extension manifest
 * @param options.extensionPath Absolute path of the installed extension
 * @param options.systemDirs    Extra substitution variables for system dirs
 * @param options.userConfig    User-provided configuration values
 * @param options.pathSeparator Platform path separator
 * @param options.logger        Optional logger for diagnostics
 * @returns The resolved MCP config, or undefined when unavailable/invalid
 */
export async function getMcpConfigForManifest(options) {
    const { manifest, extensionPath, systemDirs, userConfig, pathSeparator, logger, } = options;
    const baseConfig = manifest.server?.mcp_config;
    if (!baseConfig) {
        return undefined;
    }
    const resolved = { ...baseConfig };
    // Platform-specific command/args/env take precedence when present.
    const overrides = baseConfig.platform_overrides;
    if (overrides && process.platform in overrides) {
        const platformConfig = overrides[process.platform];
        resolved.command = platformConfig.command || resolved.command;
        resolved.args = platformConfig.args || resolved.args;
        resolved.env = platformConfig.env || resolved.env;
    }
    // Bail out when required user configuration is absent.
    if (hasRequiredConfigMissing({ manifest, userConfig })) {
        logger?.warn(`Extension ${manifest.name} has missing required configuration, skipping MCP config`);
        return undefined;
    }
    const variables = {
        __dirname: extensionPath,
        pathSeparator,
        "/": pathSeparator,
        ...systemDirs,
    };
    // Merge manifest defaults with user-supplied settings (user wins).
    const mergedConfig = {};
    if (manifest.user_config) {
        for (const [key, configOption] of Object.entries(manifest.user_config)) {
            if (configOption.default !== undefined) {
                mergedConfig[key] = configOption.default;
            }
        }
    }
    if (userConfig) {
        Object.assign(mergedConfig, userConfig);
    }
    // Expose the merged values as user_config.* substitution variables.
    for (const [key, value] of Object.entries(mergedConfig)) {
        const userConfigKey = `user_config.${key}`;
        if (Array.isArray(value)) {
            // Arrays stay arrays so they can expand inline in args lists.
            variables[userConfigKey] = value.map(String);
        }
        else if (typeof value === "boolean") {
            // Booleans become "true"/"false" strings as per spec.
            variables[userConfigKey] = value ? "true" : "false";
        }
        else {
            variables[userConfigKey] = String(value);
        }
    }
    // Replace all variables in the resolved config.
    return replaceVariables(resolved, variables);
}
|
||||
// A single config value is invalid when it is null/undefined or the empty string.
function isInvalidSingleValue(value) {
    return value == null || value === "";
}
|
||||
/**
 * Check if an extension has missing required configuration.
 *
 * A required option is considered missing when its value is
 * null/undefined/"" or an array that is empty or contains such a value.
 *
 * @param manifest The extension manifest
 * @param userConfig The user configuration
 * @returns true if required configuration is missing
 */
export function hasRequiredConfigMissing({ manifest, userConfig, }) {
    if (!manifest.user_config) {
        return false;
    }
    const config = userConfig || {};
    for (const [key, configOption] of Object.entries(manifest.user_config)) {
        if (!configOption.required) {
            continue;
        }
        const value = config[key];
        if (isInvalidSingleValue(value)) {
            return true;
        }
        if (Array.isArray(value) &&
            (value.length === 0 || value.some(isInvalidSingleValue))) {
            return true;
        }
    }
    return false;
}
|
||||
29
extracted-source/node_modules/@anthropic-ai/mcpb/dist/shared/log.js
generated
vendored
Normal file
29
extracted-source/node_modules/@anthropic-ai/mcpb/dist/shared/log.js
generated
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
/**
 * Create a console-backed logger whose output can be suppressed.
 *
 * @param options.silent When true, every logging method becomes a no-op.
 * @returns An object with log/error/warn/info/debug methods delegating to console.
 */
export function getLogger({ silent = false } = {}) {
    const logger = {};
    // Every level shares the same shape: forward to console unless silenced.
    for (const level of ["log", "error", "warn", "info", "debug"]) {
        logger[level] = (...args) => {
            if (!silent) {
                console[level](...args);
            }
        };
    }
    return logger;
}
|
||||
9
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/index.js
generated
vendored
Normal file
9
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/index.js
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
// Library exports
|
||||
export { SandboxManager } from './sandbox/sandbox-manager.js';
|
||||
export { SandboxViolationStore } from './sandbox/sandbox-violation-store.js';
|
||||
export { SandboxRuntimeConfigSchema, NetworkConfigSchema, FilesystemConfigSchema, IgnoreViolationsConfigSchema, RipgrepConfigSchema, } from './sandbox/sandbox-config.js';
|
||||
// Utility functions
|
||||
export { getDefaultWritePaths } from './sandbox/sandbox-utils.js';
|
||||
// Platform utilities
|
||||
export { getWslVersion } from './utils/platform.js';
|
||||
//# sourceMappingURL=index.js.map
|
||||
263
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/sandbox/generate-seccomp-filter.js
generated
vendored
Normal file
263
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/sandbox/generate-seccomp-filter.js
generated
vendored
Normal file
@@ -0,0 +1,263 @@
|
||||
import { join, dirname } from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
import * as fs from 'node:fs';
|
||||
import { execSync } from 'node:child_process';
|
||||
import { homedir } from 'node:os';
|
||||
import { logForDebugging } from '../utils/debug.js';
|
||||
// Cache for path lookups (key: explicit path or empty string, value: resolved path or null).
// The underlying lookups do synchronous fs checks (and possibly execSync), so
// results are memoized per explicit-path argument.
const bpfPathCache = new Map();
const applySeccompPathCache = new Map();
// Cache for global npm paths (computed once per process).
let cachedGlobalNpmPaths = null;
|
||||
/**
 * Candidate install locations for a globally-installed
 * @anthropic-ai/sandbox-runtime package. Used as a fallback when the
 * seccomp binaries aren't bundled (e.g. native builds).
 * The list is computed at most once per process (cachedGlobalNpmPaths).
 */
function getGlobalNpmPaths() {
    if (cachedGlobalNpmPaths)
        return cachedGlobalNpmPaths;
    const candidates = [];
    // Ask npm for its actual global root first.
    try {
        const npmRoot = execSync('npm root -g', {
            encoding: 'utf8',
            timeout: 5000,
            stdio: ['pipe', 'pipe', 'ignore'],
        }).trim();
        if (npmRoot) {
            candidates.push(join(npmRoot, '@anthropic-ai', 'sandbox-runtime'));
        }
    }
    catch {
        // npm missing or the lookup failed; fall back to well-known roots.
    }
    // Well-known global npm roots, in priority order:
    // system installs (Linux/macOS), homebrew, then user-local installs.
    const home = homedir();
    const wellKnownRoots = [
        join('/usr', 'lib', 'node_modules'),
        join('/usr', 'local', 'lib', 'node_modules'),
        join('/opt', 'homebrew', 'lib', 'node_modules'),
        join(home, '.npm', 'lib', 'node_modules'),
        join(home, '.npm-global', 'lib', 'node_modules'),
    ];
    for (const root of wellKnownRoots) {
        candidates.push(join(root, '@anthropic-ai', 'sandbox-runtime'));
    }
    cachedGlobalNpmPaths = candidates;
    return candidates;
}
|
||||
/**
 * Translate process.arch into the vendor directory architecture name.
 * Returns 'x64' or 'arm64', or null for unsupported architectures.
 */
function getVendorArchitecture() {
    const arch = process.arch;
    if (arch === 'x64' || arch === 'x86_64') {
        return 'x64';
    }
    if (arch === 'arm64' || arch === 'aarch64') {
        return 'arm64';
    }
    if (arch === 'ia32' || arch === 'x86') {
        // TODO: Add support for 32-bit x86 (ia32)
        // Currently blocked because the seccomp filter does not block the socketcall() syscall,
        // which is used on 32-bit x86 for all socket operations (socket, socketpair, bind, connect, etc.).
        // On 32-bit x86, the direct socket() syscall doesn't exist - instead, all socket operations
        // are multiplexed through socketcall(SYS_SOCKET, ...), socketcall(SYS_SOCKETPAIR, ...), etc.
        //
        // To properly support 32-bit x86, we need to:
        // 1. Build a separate i386 BPF filter (BPF bytecode is architecture-specific)
        // 2. Modify vendor/seccomp-src/seccomp-unix-block.c to conditionally add rules that block:
        //    - socketcall(SYS_SOCKET, [AF_UNIX, ...])
        //    - socketcall(SYS_SOCKETPAIR, [AF_UNIX, ...])
        // 3. This requires complex BPF logic to inspect socketcall's sub-function argument
        //
        // Until then, 32-bit x86 is not supported to avoid a security bypass.
        logForDebugging(`[SeccompFilter] 32-bit x86 (ia32) is not currently supported due to missing socketcall() syscall blocking. ` +
            `The current seccomp filter only blocks socket(AF_UNIX, ...), but on 32-bit x86, socketcall() can be used to bypass this.`, { level: 'error' });
        return null;
    }
    logForDebugging(`[SeccompFilter] Unsupported architecture: ${arch}. Only x64 and arm64 are supported.`);
    return null;
}
|
||||
/**
 * Local filesystem candidates for a seccomp asset (bundled builds and
 * package installs). Returns an empty list when the architecture is
 * unsupported.
 */
function getLocalSeccompPaths(filename) {
    const arch = getVendorArchitecture();
    if (!arch)
        return [];
    const baseDir = dirname(fileURLToPath(import.meta.url));
    const relativePath = join('vendor', 'seccomp', arch, filename);
    // Candidate roots, in priority order:
    //  - the bundle's own directory (e.g. when bundled into claude-cli)
    //  - the package root (vendor/seccomp/...)
    //  - dist (dist/vendor/seccomp/...)
    const roots = [baseDir, join(baseDir, '..', '..'), join(baseDir, '..')];
    return roots.map(root => join(root, relativePath));
}
|
||||
/**
 * Resolve the path to a pre-generated BPF filter file (unix-block.bpf),
 * or null when unavailable.
 *
 * Search order: the explicit `seccompBinaryPath` argument, then bundled /
 * package-install locations (vendor/seccomp/{x64,arm64}/unix-block.bpf),
 * then a global npm install (for native builds). Results are memoized per
 * explicit-path argument because the lookup is slow.
 *
 * @param seccompBinaryPath - Optional explicit path to the BPF filter file.
 */
export function getPreGeneratedBpfPath(seccompBinaryPath) {
    const cacheKey = seccompBinaryPath ?? '';
    let resolved = bpfPathCache.get(cacheKey);
    if (!bpfPathCache.has(cacheKey)) {
        resolved = findBpfPath(seccompBinaryPath);
        bpfPathCache.set(cacheKey, resolved);
    }
    return resolved;
}
|
||||
// NOTE: slow (synchronous fs lookups + possibly execSync). Callers must
// memoize at the top level (see getPreGeneratedBpfPath) rather than invoke
// this repeatedly.
function findBpfPath(seccompBinaryPath) {
    // An explicit path wins when it exists.
    if (seccompBinaryPath) {
        if (fs.existsSync(seccompBinaryPath)) {
            logForDebugging(`[SeccompFilter] Using BPF filter from explicit path: ${seccompBinaryPath}`);
            return seccompBinaryPath;
        }
        logForDebugging(`[SeccompFilter] Explicit path provided but file not found: ${seccompBinaryPath}`);
    }
    const arch = getVendorArchitecture();
    if (!arch) {
        logForDebugging(`[SeccompFilter] Cannot find pre-generated BPF filter: unsupported architecture ${process.arch}`);
        return null;
    }
    logForDebugging(`[SeccompFilter] Detected architecture: ${arch}`);
    // Bundled / package-install locations take precedence.
    for (const candidate of getLocalSeccompPaths('unix-block.bpf')) {
        if (fs.existsSync(candidate)) {
            logForDebugging(`[SeccompFilter] Found pre-generated BPF filter: ${candidate} (${arch})`);
            return candidate;
        }
    }
    // Fallback: a global npm install (native builds without bundled vendor).
    for (const globalBase of getGlobalNpmPaths()) {
        const candidate = join(globalBase, 'vendor', 'seccomp', arch, 'unix-block.bpf');
        if (fs.existsSync(candidate)) {
            logForDebugging(`[SeccompFilter] Found pre-generated BPF filter in global install: ${candidate} (${arch})`);
            return candidate;
        }
    }
    logForDebugging(`[SeccompFilter] Pre-generated BPF filter not found in any expected location (${arch})`);
    return null;
}
|
||||
/**
 * Resolve the path to the apply-seccomp helper binary, or null when
 * unavailable.
 *
 * Search order: the explicit `seccompBinaryPath` argument, then bundled /
 * package-install locations (vendor/seccomp/{x64,arm64}/apply-seccomp),
 * then a global npm install (for native builds). Results are memoized per
 * explicit-path argument because the lookup is slow.
 *
 * @param seccompBinaryPath - Optional explicit path to the apply-seccomp binary.
 */
export function getApplySeccompBinaryPath(seccompBinaryPath) {
    const cacheKey = seccompBinaryPath ?? '';
    let resolved = applySeccompPathCache.get(cacheKey);
    if (!applySeccompPathCache.has(cacheKey)) {
        resolved = findApplySeccompPath(seccompBinaryPath);
        applySeccompPathCache.set(cacheKey, resolved);
    }
    return resolved;
}
|
||||
// Uncached lookup for the apply-seccomp binary; see getApplySeccompBinaryPath
// for the memoized entry point and the documented search order.
function findApplySeccompPath(seccompBinaryPath) {
    // An explicit path wins when it exists.
    if (seccompBinaryPath) {
        if (fs.existsSync(seccompBinaryPath)) {
            logForDebugging(`[SeccompFilter] Using apply-seccomp binary from explicit path: ${seccompBinaryPath}`);
            return seccompBinaryPath;
        }
        logForDebugging(`[SeccompFilter] Explicit path provided but file not found: ${seccompBinaryPath}`);
    }
    const arch = getVendorArchitecture();
    if (!arch) {
        logForDebugging(`[SeccompFilter] Cannot find apply-seccomp binary: unsupported architecture ${process.arch}`);
        return null;
    }
    logForDebugging(`[SeccompFilter] Looking for apply-seccomp binary for architecture: ${arch}`);
    // Bundled / package-install locations take precedence.
    for (const candidate of getLocalSeccompPaths('apply-seccomp')) {
        if (fs.existsSync(candidate)) {
            logForDebugging(`[SeccompFilter] Found apply-seccomp binary: ${candidate} (${arch})`);
            return candidate;
        }
    }
    // Fallback: a global npm install (native builds without bundled vendor).
    for (const globalBase of getGlobalNpmPaths()) {
        const candidate = join(globalBase, 'vendor', 'seccomp', arch, 'apply-seccomp');
        if (fs.existsSync(candidate)) {
            logForDebugging(`[SeccompFilter] Found apply-seccomp binary in global install: ${candidate} (${arch})`);
            return candidate;
        }
    }
    logForDebugging(`[SeccompFilter] apply-seccomp binary not found in any expected location (${arch})`);
    return null;
}
|
||||
/**
 * Get the path to a pre-generated seccomp BPF filter that blocks Unix
 * domain socket creation, or null if not available.
 *
 * The filter blocks the socket(AF_UNIX, ...) syscall while allowing all
 * other syscalls, preventing creation of new Unix-socket file descriptors.
 *
 * Security scope:
 * - Blocks: socket(AF_UNIX, ...) (creating new Unix socket FDs)
 * - Does NOT block: operations on inherited Unix socket FDs (bind,
 *   connect, sendto, etc.) or FDs passed via SCM_RIGHTS
 * - For most sandboxing scenarios, blocking socket creation is sufficient
 *
 * Note: ALL Unix socket creation is blocked regardless of path; the
 * allowUnixSockets configuration is not supported on Linux because
 * seccomp-bpf cannot read user-space memory to inspect socket paths.
 *
 * Only x64 and arm64 have pre-generated filters; other architectures are
 * unsupported.
 *
 * @param seccompBinaryPath - Optional explicit path to the BPF filter file
 * @returns Path to the pre-generated BPF filter file, or null if not available
 */
export function generateSeccompFilter(seccompBinaryPath) {
    const preGeneratedBpf = getPreGeneratedBpfPath(seccompBinaryPath);
    if (!preGeneratedBpf) {
        logForDebugging('[SeccompFilter] Pre-generated BPF filter not available for this architecture. ' +
            'Only x64 and arm64 are supported.', { level: 'error' });
        return null;
    }
    logForDebugging('[SeccompFilter] Using pre-generated BPF filter');
    return preGeneratedBpf;
}
|
||||
/**
 * Clean up a seccomp filter file.
 *
 * Intentionally a no-op: only pre-generated BPF files from vendor/ are
 * used, and those must never be deleted. Kept for backward compatibility
 * with existing callers.
 */
export function cleanupSeccompFilter(_filterPath) {
    // Deliberately empty — pre-generated BPF files are never cleaned up.
}
|
||||
//# sourceMappingURL=generate-seccomp-filter.js.map
|
||||
217
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/sandbox/http-proxy.js
generated
vendored
Normal file
217
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/sandbox/http-proxy.js
generated
vendored
Normal file
@@ -0,0 +1,217 @@
|
||||
import { Agent, createServer } from 'node:http';
|
||||
import { request as httpRequest } from 'node:http';
|
||||
import { request as httpsRequest } from 'node:https';
|
||||
import { connect } from 'node:net';
|
||||
import { URL } from 'node:url';
|
||||
import { logForDebugging } from '../utils/debug.js';
|
||||
/**
 * Create an HTTP proxy server that enforces a network allowlist.
 *
 * Two request styles are handled:
 *  - CONNECT (HTTPS tunnels): the target is parsed from `req.url`
 *    ("host:port"), checked via `options.filter`, then either tunneled
 *    directly or routed through a MITM proxy over a Unix socket.
 *  - Plain proxy-style HTTP requests: the absolute URL in `req.url` is
 *    parsed, filtered, and forwarded (directly or via the MITM proxy).
 *
 * @param options.filter async (port, hostname, socket) => boolean; a false
 *   result blocks the connection with 403 / X-Proxy-Error headers.
 * @param options.getMitmSocketPath optional (hostname) => Unix socket path;
 *   when it returns a path, traffic for that host is routed to the MITM
 *   proxy listening there.
 * @returns The configured (not yet listening) node:http Server.
 */
export function createHttpProxyServer(options) {
    const server = createServer();
    // Handle CONNECT requests for HTTPS traffic
    server.on('connect', async (req, socket) => {
        // Attach error handler immediately to prevent unhandled errors
        socket.on('error', err => {
            logForDebugging(`Client socket error: ${err.message}`, { level: 'error' });
        });
        try {
            // NOTE(review): splitting on ':' mis-parses IPv6 literals like
            // "[::1]:443" — confirm upstream clients never send them.
            const [hostname, portStr] = req.url.split(':');
            const port = portStr === undefined ? undefined : parseInt(portStr, 10);
            if (!hostname || !port) {
                logForDebugging(`Invalid CONNECT request: ${req.url}`, {
                    level: 'error',
                });
                socket.end('HTTP/1.1 400 Bad Request\r\n\r\n');
                return;
            }
            // Allowlist check happens before any upstream connection is made.
            const allowed = await options.filter(port, hostname, socket);
            if (!allowed) {
                logForDebugging(`Connection blocked to ${hostname}:${port}`, {
                    level: 'error',
                });
                socket.end('HTTP/1.1 403 Forbidden\r\n' +
                    'Content-Type: text/plain\r\n' +
                    'X-Proxy-Error: blocked-by-allowlist\r\n' +
                    '\r\n' +
                    'Connection blocked by network allowlist');
                return;
            }
            // Check if this host should be routed through a MITM proxy
            const mitmSocketPath = options.getMitmSocketPath?.(hostname);
            if (mitmSocketPath) {
                // Route through MITM proxy via Unix socket
                logForDebugging(`Routing CONNECT ${hostname}:${port} through MITM proxy at ${mitmSocketPath}`);
                const mitmSocket = connect({ path: mitmSocketPath }, () => {
                    // Send CONNECT request to the MITM proxy
                    mitmSocket.write(`CONNECT ${hostname}:${port} HTTP/1.1\r\n` +
                        `Host: ${hostname}:${port}\r\n` +
                        '\r\n');
                });
                // Buffer to accumulate the MITM proxy's response
                let responseBuffer = '';
                // Parses the MITM proxy's CONNECT response; once the header
                // terminator arrives, either starts bidirectional piping or
                // reports the failure to the client.
                const onMitmData = (chunk) => {
                    responseBuffer += chunk.toString();
                    // Check if we've received the full HTTP response headers
                    const headerEndIndex = responseBuffer.indexOf('\r\n\r\n');
                    if (headerEndIndex !== -1) {
                        // Remove data listener, we're done parsing the response
                        mitmSocket.removeListener('data', onMitmData);
                        // Check if MITM proxy accepted the connection
                        const statusLine = responseBuffer.substring(0, responseBuffer.indexOf('\r\n'));
                        if (statusLine.includes(' 200 ')) {
                            // Connection established, now pipe data between client and MITM
                            socket.write('HTTP/1.1 200 Connection Established\r\n\r\n');
                            // If there's any data after the headers, write it to the client
                            const remainingData = responseBuffer.substring(headerEndIndex + 4);
                            if (remainingData.length > 0) {
                                socket.write(remainingData);
                            }
                            mitmSocket.pipe(socket);
                            socket.pipe(mitmSocket);
                        }
                        else {
                            logForDebugging(`MITM proxy rejected CONNECT: ${statusLine}`, {
                                level: 'error',
                            });
                            socket.end('HTTP/1.1 502 Bad Gateway\r\n\r\n');
                            mitmSocket.destroy();
                        }
                    }
                };
                mitmSocket.on('data', onMitmData);
                mitmSocket.on('error', err => {
                    logForDebugging(`MITM proxy connection failed: ${err.message}`, {
                        level: 'error',
                    });
                    socket.end('HTTP/1.1 502 Bad Gateway\r\n\r\n');
                });
                socket.on('error', err => {
                    logForDebugging(`Client socket error: ${err.message}`, {
                        level: 'error',
                    });
                    mitmSocket.destroy();
                });
                // Propagate half-close in both directions so neither side
                // is left hanging when the other finishes.
                socket.on('end', () => mitmSocket.end());
                mitmSocket.on('end', () => socket.end());
            }
            else {
                // Direct connection (original behavior)
                const serverSocket = connect(port, hostname, () => {
                    socket.write('HTTP/1.1 200 Connection Established\r\n\r\n');
                    serverSocket.pipe(socket);
                    socket.pipe(serverSocket);
                });
                serverSocket.on('error', err => {
                    logForDebugging(`CONNECT tunnel failed: ${err.message}`, {
                        level: 'error',
                    });
                    socket.end('HTTP/1.1 502 Bad Gateway\r\n\r\n');
                });
                socket.on('error', err => {
                    logForDebugging(`Client socket error: ${err.message}`, {
                        level: 'error',
                    });
                    serverSocket.destroy();
                });
                // Propagate half-close in both directions.
                socket.on('end', () => serverSocket.end());
                serverSocket.on('end', () => socket.end());
            }
        }
        catch (err) {
            logForDebugging(`Error handling CONNECT: ${err}`, { level: 'error' });
            socket.end('HTTP/1.1 500 Internal Server Error\r\n\r\n');
        }
    });
    // Handle regular HTTP requests
    server.on('request', async (req, res) => {
        try {
            // Proxy-style requests carry the absolute URL in req.url.
            const url = new URL(req.url);
            const hostname = url.hostname;
            const port = url.port
                ? parseInt(url.port, 10)
                : url.protocol === 'https:'
                    ? 443
                    : 80;
            const allowed = await options.filter(port, hostname, req.socket);
            if (!allowed) {
                logForDebugging(`HTTP request blocked to ${hostname}:${port}`, {
                    level: 'error',
                });
                res.writeHead(403, {
                    'Content-Type': 'text/plain',
                    'X-Proxy-Error': 'blocked-by-allowlist',
                });
                res.end('Connection blocked by network allowlist');
                return;
            }
            // Check if this host should be routed through a MITM proxy
            const mitmSocketPath = options.getMitmSocketPath?.(hostname);
            if (mitmSocketPath) {
                // Route through MITM proxy via Unix socket
                // Use an agent that connects via the Unix socket
                logForDebugging(`Routing HTTP ${req.method} ${hostname}:${port} through MITM proxy at ${mitmSocketPath}`);
                const mitmAgent = new Agent({
                    // @ts-expect-error - socketPath is valid but not in types
                    socketPath: mitmSocketPath,
                });
                // Send request to MITM proxy with full URL (proxy-style request)
                const proxyReq = httpRequest({
                    agent: mitmAgent,
                    // For proxy requests, path should be the full URL
                    path: req.url,
                    method: req.method,
                    headers: {
                        ...req.headers,
                        host: url.host,
                    },
                }, proxyRes => {
                    res.writeHead(proxyRes.statusCode, proxyRes.headers);
                    proxyRes.pipe(res);
                });
                proxyReq.on('error', err => {
                    logForDebugging(`MITM proxy request failed: ${err.message}`, {
                        level: 'error',
                    });
                    // Only send 502 if the upstream failed before we started
                    // streaming the response.
                    if (!res.headersSent) {
                        res.writeHead(502, { 'Content-Type': 'text/plain' });
                        res.end('Bad Gateway');
                    }
                });
                req.pipe(proxyReq);
            }
            else {
                // Direct request (original behavior)
                // Choose http or https module
                const requestFn = url.protocol === 'https:' ? httpsRequest : httpRequest;
                const proxyReq = requestFn({
                    hostname,
                    port,
                    path: url.pathname + url.search,
                    method: req.method,
                    headers: {
                        ...req.headers,
                        host: url.host,
                    },
                }, proxyRes => {
                    res.writeHead(proxyRes.statusCode, proxyRes.headers);
                    proxyRes.pipe(res);
                });
                proxyReq.on('error', err => {
                    logForDebugging(`Proxy request failed: ${err.message}`, {
                        level: 'error',
                    });
                    // Only send 502 if the upstream failed before we started
                    // streaming the response.
                    if (!res.headersSent) {
                        res.writeHead(502, { 'Content-Type': 'text/plain' });
                        res.end('Bad Gateway');
                    }
                });
                req.pipe(proxyReq);
            }
        }
        catch (err) {
            logForDebugging(`Error handling HTTP request: ${err}`, { level: 'error' });
            res.writeHead(500, { 'Content-Type': 'text/plain' });
            res.end('Internal Server Error');
        }
    });
    return server;
}
|
||||
//# sourceMappingURL=http-proxy.js.map
|
||||
875
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/sandbox/linux-sandbox-utils.js
generated
vendored
Normal file
875
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/sandbox/linux-sandbox-utils.js
generated
vendored
Normal file
@@ -0,0 +1,875 @@
|
||||
import shellquote from 'shell-quote';
|
||||
import { logForDebugging } from '../utils/debug.js';
|
||||
import { whichSync } from '../utils/which.js';
|
||||
import { randomBytes } from 'node:crypto';
|
||||
import * as fs from 'fs';
|
||||
import { spawn } from 'node:child_process';
|
||||
import { tmpdir } from 'node:os';
|
||||
import path, { join } from 'node:path';
|
||||
import { ripGrep } from '../utils/ripgrep.js';
|
||||
import { generateProxyEnvVars, normalizePathForSandbox, normalizeCaseForComparison, isSymlinkOutsideBoundary, DANGEROUS_FILES, getDangerousDirectories, } from './sandbox-utils.js';
|
||||
import { generateSeccompFilter, cleanupSeccompFilter, getPreGeneratedBpfPath, getApplySeccompBinaryPath, } from './generate-seccomp-filter.js';
|
||||
/** Default max depth for searching dangerous files */
|
||||
const DEFAULT_MANDATORY_DENY_SEARCH_DEPTH = 3;
|
||||
/**
 * Walk every component of `targetPath` and return the first component that
 * is a symlink located inside one of `allowedWritePaths`; null when none is.
 *
 * This is used to detect and block symlink replacement attacks where an
 * attacker could delete a symlink and create a real directory with
 * malicious content.
 */
function findSymlinkInPath(targetPath, allowedWritePaths) {
    let prefix = '';
    for (const component of targetPath.split(path.sep)) {
        if (component === '')
            continue; // leading separator yields an empty segment
        prefix = `${prefix}${path.sep}${component}`;
        let stats;
        try {
            stats = fs.lstatSync(prefix);
        }
        catch {
            // Component doesn't exist; nothing deeper can be a symlink.
            return null;
        }
        if (!stats.isSymbolicLink()) {
            continue;
        }
        // Only report symlinks that live inside an allowed write path.
        const inAllowedPath = allowedWritePaths.some(allowedPath => prefix === allowedPath || prefix.startsWith(allowedPath + '/'));
        if (inAllowedPath) {
            return prefix;
        }
    }
    return null;
}
|
||||
/**
 * Check if any existing component in the path is a file or a symlink
 * (not a real directory). If so, the target path can never be created
 * because you can't mkdir under a file, so there is nothing to deny.
 *
 * This handles the git worktree case: .git is a file, so .git/hooks can
 * never exist and there's nothing to deny.
 */
function hasFileAncestor(targetPath) {
    let currentPath = '';
    for (const part of targetPath.split(path.sep)) {
        if (!part)
            continue; // Skip empty parts (leading /)
        currentPath = currentPath + path.sep + part;
        let stat;
        try {
            // Use lstat, not stat: stat follows symlinks, so the original
            // isSymbolicLink() check could never fire (dead branch). lstat
            // reports the link itself, letting symlink ancestors
            // short-circuit as intended.
            stat = fs.lstatSync(currentPath);
        }
        catch {
            // Path doesn't exist — stop checking
            break;
        }
        if (stat.isFile() || stat.isSymbolicLink()) {
            // This component exists as a file — nothing below it can be created
            return true;
        }
    }
    return false;
}
|
||||
/**
 * Find the first non-existent path component.
 * E.g., for "/existing/parent/nonexistent/child/file.txt" where
 * /existing/parent exists, returns "/existing/parent/nonexistent".
 *
 * This is used to block creation of non-existent deny paths by mounting
 * /dev/null at the first missing component, preventing mkdir from creating
 * the parent directories.
 */
function findFirstNonExistentComponent(targetPath) {
    let builtPath = '';
    for (const segment of targetPath.split(path.sep)) {
        if (segment === '')
            continue; // leading separator yields an empty segment
        const candidate = `${builtPath}${path.sep}${segment}`;
        if (!fs.existsSync(candidate)) {
            return candidate;
        }
        builtPath = candidate;
    }
    // Every component exists; callers are expected to ensure at least one
    // component is missing before calling, so this is a defensive fallback.
    return targetPath;
}
|
||||
/**
 * Get mandatory deny paths using ripgrep (Linux only).
 * Uses a SINGLE ripgrep call with multiple glob patterns for efficiency.
 * With --max-depth limiting, this is fast enough to run on each command without memoization.
 *
 * @param {{command: string}} [ripgrepConfig] - How to invoke ripgrep (defaults to `rg` on PATH).
 * @param {number} [maxDepth] - Maximum directory depth passed to ripgrep's --max-depth.
 * @param {boolean} [allowGitConfig] - When true, .git/config is NOT added to the deny list.
 * @param {AbortSignal} [abortSignal] - Optional signal used to cancel the ripgrep scan.
 * @returns {Promise<string[]>} Deduplicated absolute paths that must be denied for writes.
 */
async function linuxGetMandatoryDenyPaths(ripgrepConfig = { command: 'rg' }, maxDepth = DEFAULT_MANDATORY_DENY_SEARCH_DEPTH, allowGitConfig = false, abortSignal) {
    const cwd = process.cwd();
    // Use provided signal or create a fallback controller
    // (the fallback is never aborted; it just satisfies ripGrep's signature)
    const fallbackController = new AbortController();
    const signal = abortSignal ?? fallbackController.signal;
    const dangerousDirectories = getDangerousDirectories();
    // Note: Settings files are added at the callsite in sandbox-manager.ts
    const denyPaths = [
        // Dangerous files in CWD
        ...DANGEROUS_FILES.map(f => path.resolve(cwd, f)),
        // Dangerous directories in CWD
        ...dangerousDirectories.map(d => path.resolve(cwd, d)),
    ];
    // Git hooks and config are only denied when .git exists as a directory.
    // In git worktrees, .git is a file (e.g., "gitdir: /path/..."), so
    // .git/hooks can never exist — denying it would cause bwrap to fail.
    // When .git doesn't exist at all, mounting at .git would block its
    // creation and break git init.
    const dotGitPath = path.resolve(cwd, '.git');
    let dotGitIsDirectory = false;
    try {
        dotGitIsDirectory = fs.statSync(dotGitPath).isDirectory();
    }
    catch {
        // .git doesn't exist
    }
    if (dotGitIsDirectory) {
        // Git hooks always blocked for security
        denyPaths.push(path.resolve(cwd, '.git/hooks'));
        // Git config conditionally blocked based on allowGitConfig setting
        if (!allowGitConfig) {
            denyPaths.push(path.resolve(cwd, '.git/config'));
        }
    }
    // Build iglob args for all patterns in one ripgrep call
    // (--iglob matches case-insensitively)
    const iglobArgs = [];
    for (const fileName of DANGEROUS_FILES) {
        iglobArgs.push('--iglob', fileName);
    }
    for (const dirName of dangerousDirectories) {
        iglobArgs.push('--iglob', `**/${dirName}/**`);
    }
    // Git hooks always blocked in nested repos
    iglobArgs.push('--iglob', '**/.git/hooks/**');
    // Git config conditionally blocked in nested repos
    if (!allowGitConfig) {
        iglobArgs.push('--iglob', '**/.git/config');
    }
    // Single ripgrep call to find all dangerous paths in subdirectories
    // Limit depth for performance - deeply nested dangerous files are rare
    // and the security benefit doesn't justify the traversal cost
    let matches = [];
    try {
        matches = await ripGrep([
            '--files',
            '--hidden',
            '--max-depth',
            String(maxDepth),
            ...iglobArgs,
            '-g',
            '!**/node_modules/**',
        ], cwd, signal, ripgrepConfig);
    }
    catch (error) {
        // Best-effort scan: on failure we still return the CWD-level denies above.
        logForDebugging(`[Sandbox] ripgrep scan failed: ${error}`);
    }
    // Process matches: ripgrep returns file paths relative to cwd.
    for (const match of matches) {
        const absolutePath = path.resolve(cwd, match);
        // File inside a dangerous directory -> add the directory path
        let foundDir = false;
        for (const dirName of [...dangerousDirectories, '.git']) {
            const normalizedDirName = normalizeCaseForComparison(dirName);
            const segments = absolutePath.split(path.sep);
            const dirIndex = segments.findIndex(s => normalizeCaseForComparison(s) === normalizedDirName);
            if (dirIndex !== -1) {
                // For .git, we want hooks/ or config, not the whole .git dir
                if (dirName === '.git') {
                    const gitDir = segments.slice(0, dirIndex + 1).join(path.sep);
                    // NOTE(review): these substring checks use '/' literally,
                    // which matches ripgrep output on Linux (this module is Linux-only).
                    if (match.includes('.git/hooks')) {
                        denyPaths.push(path.join(gitDir, 'hooks'));
                    }
                    else if (match.includes('.git/config')) {
                        denyPaths.push(path.join(gitDir, 'config'));
                    }
                }
                else {
                    // Deny the enclosing dangerous directory, not just the matched file.
                    denyPaths.push(segments.slice(0, dirIndex + 1).join(path.sep));
                }
                foundDir = true;
                break;
            }
        }
        // Dangerous file match
        if (!foundDir) {
            denyPaths.push(absolutePath);
        }
    }
    // Deduplicate: the same path can be reached via multiple patterns.
    return [...new Set(denyPaths)];
}
|
||||
// Track generated seccomp filters for cleanup on process exit.
// Only runtime-generated filter files are added here; pre-generated
// vendor filters are intentionally never tracked (see wrapCommandWithSandboxLinux).
const generatedSeccompFilters = new Set();
// Track mount points created by bwrap for non-existent deny paths.
// When bwrap does --ro-bind /dev/null /nonexistent/path, it creates an empty
// file on the host as a mount point. These persist after bwrap exits and must
// be cleaned up explicitly.
const bwrapMountPoints = new Set();
// Guard so registerExitCleanupHandler() installs its process 'exit' hook at most once.
let exitHandlerRegistered = false;
|
||||
/**
 * Register cleanup handler for generated seccomp filters and bwrap mount points.
 * Installs a single process 'exit' hook; repeated calls are no-ops.
 */
function registerExitCleanupHandler() {
    if (exitHandlerRegistered) {
        return;
    }
    exitHandlerRegistered = true;
    process.on('exit', () => {
        for (const filterPath of generatedSeccompFilters) {
            try {
                cleanupSeccompFilter(filterPath);
            }
            catch {
                // Best effort: never let cleanup failures disturb shutdown
            }
        }
        cleanupBwrapMountPoints();
    });
}
|
||||
/**
 * Clean up mount point files created by bwrap for non-existent deny paths.
 *
 * When protecting non-existent deny paths, bwrap creates empty files on the
 * host filesystem as mount points for --ro-bind. These files persist after
 * bwrap exits. This function removes them.
 *
 * This should be called after each sandboxed command completes to prevent
 * ghost dotfiles (e.g. .bashrc, .gitconfig) from appearing in the working
 * directory. It is also called automatically on process exit as a safety net.
 *
 * Safe to call at any time — it only removes files that were tracked during
 * generateFilesystemArgs() and skips any that no longer exist.
 */
export function cleanupBwrapMountPoints() {
    for (const mountPoint of bwrapMountPoints) {
        try {
            const info = fs.statSync(mountPoint);
            if (info.isFile()) {
                // bwrap leaves a zero-byte placeholder; anything with real
                // content was written by someone else, so leave it alone.
                if (info.size === 0) {
                    fs.unlinkSync(mountPoint);
                    logForDebugging(`[Sandbox Linux] Cleaned up bwrap mount point (file): ${mountPoint}`);
                }
            }
            else if (info.isDirectory()) {
                // Empty directory mount points are created for intermediate
                // components (Fix 2). Only remove if still empty.
                if (fs.readdirSync(mountPoint).length === 0) {
                    fs.rmdirSync(mountPoint);
                    logForDebugging(`[Sandbox Linux] Cleaned up bwrap mount point (dir): ${mountPoint}`);
                }
            }
        }
        catch {
            // Ignore cleanup errors — the file may have already been removed
        }
    }
    bwrapMountPoints.clear();
}
|
||||
/**
 * Get detailed status of Linux sandbox dependencies.
 *
 * @param {{bpfPath?: string, applyPath?: string}} [seccompConfig] - Optional
 *   overrides for the seccomp BPF filter and apply-seccomp binary locations.
 * @returns {{hasBwrap: boolean, hasSocat: boolean, hasSeccompBpf: boolean, hasSeccompApply: boolean}}
 */
export function getLinuxDependencyStatus(seccompConfig) {
    const onPath = (binary) => whichSync(binary) !== null;
    return {
        hasBwrap: onPath('bwrap'),
        hasSocat: onPath('socat'),
        hasSeccompBpf: getPreGeneratedBpfPath(seccompConfig?.bpfPath) !== null,
        hasSeccompApply: getApplySeccompBinaryPath(seccompConfig?.applyPath) !== null,
    };
}
|
||||
/**
 * Check sandbox dependencies and return structured result.
 *
 * Missing bwrap/socat are hard errors (sandbox cannot run at all);
 * missing seccomp pieces only degrade protection, so they are warnings.
 *
 * @param {{bpfPath?: string, applyPath?: string}} [seccompConfig]
 * @returns {{warnings: string[], errors: string[]}}
 */
export function checkLinuxDependencies(seccompConfig) {
    const errors = [];
    const warnings = [];
    const requiredBinaries = [
        ['bwrap', 'bubblewrap (bwrap) not installed'],
        ['socat', 'socat not installed'],
    ];
    for (const [binary, message] of requiredBinaries) {
        if (whichSync(binary) === null) {
            errors.push(message);
        }
    }
    const seccompReady = getPreGeneratedBpfPath(seccompConfig?.bpfPath) !== null &&
        getApplySeccompBinaryPath(seccompConfig?.applyPath) !== null;
    if (!seccompReady) {
        warnings.push('seccomp not available - unix socket access not restricted');
    }
    return { warnings, errors };
}
|
||||
/**
 * Initialize the Linux network bridge for sandbox networking
 *
 * ARCHITECTURE NOTE:
 * Linux network sandboxing uses bwrap --unshare-net which creates a completely isolated
 * network namespace with NO network access. To enable network access, we:
 *
 * 1. Host side: Run socat bridges that listen on Unix sockets and forward to host proxy servers
 *    - HTTP bridge: Unix socket -> host HTTP proxy (for HTTP/HTTPS traffic)
 *    - SOCKS bridge: Unix socket -> host SOCKS5 proxy (for SSH/git traffic)
 *
 * 2. Sandbox side: Bind the Unix sockets into the isolated namespace and run socat listeners
 *    - HTTP listener on port 3128 -> HTTP Unix socket -> host HTTP proxy
 *    - SOCKS listener on port 1080 -> SOCKS Unix socket -> host SOCKS5 proxy
 *
 * 3. Configure environment:
 *    - HTTP_PROXY=http://localhost:3128 for HTTP/HTTPS tools
 *    - GIT_SSH_COMMAND with socat for SSH through SOCKS5
 *
 * LIMITATION: Unlike macOS sandbox which can enforce domain-based allowlists at the kernel level,
 * Linux's --unshare-net provides only all-or-nothing network isolation. Domain filtering happens
 * at the host proxy level, not the sandbox boundary. This means network restrictions on Linux
 * depend on the proxy's filtering capabilities.
 *
 * DEPENDENCIES: Requires bwrap (bubblewrap) and socat
 *
 * @param {number} httpProxyPort - Host port of the HTTP proxy the bridge forwards to.
 * @param {number} socksProxyPort - Host port of the SOCKS5 proxy the bridge forwards to.
 * @returns {Promise<object>} Bridge handle: socket paths, the two socat child
 *   processes, and the host proxy ports (echoed back for callers).
 * @throws {Error} If either socat process fails to start or the Unix sockets
 *   never appear within the retry window.
 */
export async function initializeLinuxNetworkBridge(httpProxyPort, socksProxyPort) {
    // Random suffix so concurrent sessions get distinct socket paths in tmpdir.
    const socketId = randomBytes(8).toString('hex');
    const httpSocketPath = join(tmpdir(), `claude-http-${socketId}.sock`);
    const socksSocketPath = join(tmpdir(), `claude-socks-${socketId}.sock`);
    // Start HTTP bridge: listen on the Unix socket, forward each connection
    // to the host HTTP proxy. `fork` handles multiple concurrent clients;
    // keepalive options detect dead TCP peers.
    const httpSocatArgs = [
        `UNIX-LISTEN:${httpSocketPath},fork,reuseaddr`,
        `TCP:localhost:${httpProxyPort},keepalive,keepidle=10,keepintvl=5,keepcnt=3`,
    ];
    logForDebugging(`Starting HTTP bridge: socat ${httpSocatArgs.join(' ')}`);
    const httpBridgeProcess = spawn('socat', httpSocatArgs, {
        stdio: 'ignore',
    });
    if (!httpBridgeProcess.pid) {
        throw new Error('Failed to start HTTP bridge process');
    }
    // Add error and exit handlers to monitor bridge health
    httpBridgeProcess.on('error', err => {
        logForDebugging(`HTTP bridge process error: ${err}`, { level: 'error' });
    });
    httpBridgeProcess.on('exit', (code, signal) => {
        logForDebugging(`HTTP bridge process exited with code ${code}, signal ${signal}`, { level: code === 0 ? 'info' : 'error' });
    });
    // Start SOCKS bridge (same pattern as the HTTP bridge above)
    const socksSocatArgs = [
        `UNIX-LISTEN:${socksSocketPath},fork,reuseaddr`,
        `TCP:localhost:${socksProxyPort},keepalive,keepidle=10,keepintvl=5,keepcnt=3`,
    ];
    logForDebugging(`Starting SOCKS bridge: socat ${socksSocatArgs.join(' ')}`);
    const socksBridgeProcess = spawn('socat', socksSocatArgs, {
        stdio: 'ignore',
    });
    if (!socksBridgeProcess.pid) {
        // Clean up HTTP bridge so we don't leak an orphaned socat process
        if (httpBridgeProcess.pid) {
            try {
                process.kill(httpBridgeProcess.pid, 'SIGTERM');
            }
            catch {
                // Ignore errors
            }
        }
        throw new Error('Failed to start SOCKS bridge process');
    }
    // Add error and exit handlers to monitor bridge health
    socksBridgeProcess.on('error', err => {
        logForDebugging(`SOCKS bridge process error: ${err}`, { level: 'error' });
    });
    socksBridgeProcess.on('exit', (code, signal) => {
        logForDebugging(`SOCKS bridge process exited with code ${code}, signal ${signal}`, { level: code === 0 ? 'info' : 'error' });
    });
    // Wait for both sockets to be ready. Backoff is i*100ms, so the first
    // check is immediate and later attempts wait progressively longer.
    const maxAttempts = 5;
    for (let i = 0; i < maxAttempts; i++) {
        // Bail out early if either socat process already died.
        if (!httpBridgeProcess.pid ||
            httpBridgeProcess.killed ||
            !socksBridgeProcess.pid ||
            socksBridgeProcess.killed) {
            throw new Error('Linux bridge process died unexpectedly');
        }
        try {
            // fs already imported
            if (fs.existsSync(httpSocketPath) && fs.existsSync(socksSocketPath)) {
                logForDebugging(`Linux bridges ready after ${i + 1} attempts`);
                break;
            }
        }
        catch (err) {
            logForDebugging(`Error checking sockets (attempt ${i + 1}): ${err}`, {
                level: 'error',
            });
        }
        if (i === maxAttempts - 1) {
            // Last attempt failed: clean up both processes before throwing
            if (httpBridgeProcess.pid) {
                try {
                    process.kill(httpBridgeProcess.pid, 'SIGTERM');
                }
                catch {
                    // Ignore errors
                }
            }
            if (socksBridgeProcess.pid) {
                try {
                    process.kill(socksBridgeProcess.pid, 'SIGTERM');
                }
                catch {
                    // Ignore errors
                }
            }
            throw new Error(`Failed to create bridge sockets after ${maxAttempts} attempts`);
        }
        await new Promise(resolve => setTimeout(resolve, i * 100));
    }
    return {
        httpSocketPath,
        socksSocketPath,
        httpBridgeProcess,
        socksBridgeProcess,
        httpProxyPort,
        socksProxyPort,
    };
}
|
||||
/**
 * Build the command that runs inside the sandbox.
 * Sets up HTTP proxy on port 3128 and SOCKS proxy on port 1080, then runs
 * the user command — through apply-seccomp when a filter path is supplied,
 * otherwise directly via eval.
 */
function buildSandboxCommand(httpSocketPath, socksSocketPath, userCommand, seccompFilterPath, shell, applySeccompPath) {
    // Default to bash for backward compatibility
    const shellPath = shell || 'bash';
    // Background socat listeners forward in-sandbox TCP ports to the
    // host-side Unix socket bridges; the trap tears them down on exit.
    const socatCommands = [
        `socat TCP-LISTEN:3128,fork,reuseaddr UNIX-CONNECT:${httpSocketPath} >/dev/null 2>&1 &`,
        `socat TCP-LISTEN:1080,fork,reuseaddr UNIX-CONNECT:${socksSocketPath} >/dev/null 2>&1 &`,
        'trap "kill %1 %2 2>/dev/null; exit" EXIT',
    ];
    let userCommandLine;
    if (seccompFilterPath) {
        // apply-seccomp approach:
        // 1. Outer bwrap/bash: starts socat processes (can use Unix sockets)
        // 2. apply-seccomp: sets PR_SET_NO_NEW_PRIVS, applies the seccomp BPF
        //    filter via prctl(PR_SET_SECCOMP), and execs the user command
        // 3. User command runs with seccomp active (Unix sockets blocked)
        //
        // This is simpler and more portable than nested bwrap, with no FD
        // redirects needed.
        const applySeccompBinary = getApplySeccompBinaryPath(applySeccompPath);
        if (!applySeccompBinary) {
            throw new Error('apply-seccomp binary not found. This should have been caught earlier. ' +
                'Ensure vendor/seccomp/{x64,arm64}/apply-seccomp binaries are included in the package.');
        }
        userCommandLine = shellquote.quote([
            applySeccompBinary,
            seccompFilterPath,
            shellPath,
            '-c',
            userCommand,
        ]);
    }
    else {
        // No seccomp filter - run user command directly
        userCommandLine = `eval ${shellquote.quote([userCommand])}`;
    }
    const innerScript = [...socatCommands, userCommandLine].join('\n');
    return `${shellPath} -c ${shellquote.quote([innerScript])}`;
}
|
||||
/**
 * Generate filesystem bind mount arguments for bwrap.
 *
 * Mount-order matters: bwrap processes binds in argument order, so later
 * binds layer over earlier ones (e.g. deny ro-binds over allowed rw-binds,
 * allow-within-deny ro-binds over tmpfs).
 *
 * @param {{denyOnly?: string[], allowWithinDeny?: string[]}|undefined} readConfig
 *   Read restrictions; undefined or empty denyOnly means unrestricted reads.
 * @param {{allowOnly?: string[], denyWithinAllow?: string[]}|undefined} writeConfig
 *   Write restrictions; undefined means writes are unrestricted.
 * @param {{command: string}} [ripgrepConfig] - Passed through to linuxGetMandatoryDenyPaths.
 * @param {number} [mandatoryDenySearchDepth] - ripgrep --max-depth for the deny scan.
 * @param {boolean} [allowGitConfig] - When true, .git/config is not force-denied.
 * @param {AbortSignal} [abortSignal] - Cancels the mandatory-deny ripgrep scan.
 * @returns {Promise<string[]>} Flat list of bwrap arguments (--bind/--ro-bind/--tmpfs ...).
 */
async function generateFilesystemArgs(readConfig, writeConfig, ripgrepConfig = { command: 'rg' }, mandatoryDenySearchDepth = DEFAULT_MANDATORY_DENY_SEARCH_DEPTH, allowGitConfig = false, abortSignal) {
    const args = [];
    // fs already imported
    // Determine initial root mount based on write restrictions
    if (writeConfig) {
        // Write restrictions: Start with read-only root, then allow writes to specific paths
        args.push('--ro-bind', '/', '/');
        // Collect normalized allowed write paths for later checking
        const allowedWritePaths = [];
        // Allow writes to specific paths
        for (const pathPattern of writeConfig.allowOnly || []) {
            const normalizedPath = normalizePathForSandbox(pathPattern);
            logForDebugging(`[Sandbox Linux] Processing write path: ${pathPattern} -> ${normalizedPath}`);
            // Skip /dev/* paths since --dev /dev already handles them
            if (normalizedPath.startsWith('/dev/')) {
                logForDebugging(`[Sandbox Linux] Skipping /dev path: ${normalizedPath}`);
                continue;
            }
            // bwrap cannot bind a path that doesn't exist on the host
            if (!fs.existsSync(normalizedPath)) {
                logForDebugging(`[Sandbox Linux] Skipping non-existent write path: ${normalizedPath}`);
                continue;
            }
            // Check if path is a symlink pointing outside expected boundaries
            // bwrap follows symlinks, so --bind on a symlink makes the target writable
            // This could unexpectedly expose paths the user didn't intend to allow
            try {
                const resolvedPath = fs.realpathSync(normalizedPath);
                // Trim trailing slashes before comparing: realpathSync never returns
                // a trailing slash, but normalizedPath may have one, which would cause
                // a false mismatch and incorrectly treat the path as a symlink.
                const normalizedForComparison = normalizedPath.replace(/\/+$/, '');
                if (resolvedPath !== normalizedForComparison &&
                    isSymlinkOutsideBoundary(normalizedPath, resolvedPath)) {
                    logForDebugging(`[Sandbox Linux] Skipping symlink write path pointing outside expected location: ${pathPattern} -> ${resolvedPath}`);
                    continue;
                }
            }
            catch {
                // realpathSync failed - path might not exist or be accessible, skip it
                logForDebugging(`[Sandbox Linux] Skipping write path that could not be resolved: ${normalizedPath}`);
                continue;
            }
            args.push('--bind', normalizedPath, normalizedPath);
            allowedWritePaths.push(normalizedPath);
        }
        // Deny writes within allowed paths (user-specified + mandatory denies)
        const denyPaths = [
            ...(writeConfig.denyWithinAllow || []),
            ...(await linuxGetMandatoryDenyPaths(ripgrepConfig, mandatoryDenySearchDepth, allowGitConfig, abortSignal)),
        ];
        for (const pathPattern of denyPaths) {
            const normalizedPath = normalizePathForSandbox(pathPattern);
            // Skip /dev/* paths since --dev /dev already handles them
            if (normalizedPath.startsWith('/dev/')) {
                continue;
            }
            // Check for symlinks in the path - if any parent component is a symlink,
            // mount /dev/null there to prevent symlink replacement attacks.
            // Attack scenario: .claude is a symlink to ./decoy/, attacker deletes
            // symlink and creates real .claude/settings.json with malicious hooks.
            const symlinkInPath = findSymlinkInPath(normalizedPath, allowedWritePaths);
            if (symlinkInPath) {
                args.push('--ro-bind', '/dev/null', symlinkInPath);
                logForDebugging(`[Sandbox Linux] Mounted /dev/null at symlink ${symlinkInPath} to prevent symlink replacement attack`);
                continue;
            }
            // Handle non-existent paths by mounting /dev/null to block creation.
            // Without this, a sandboxed process could mkdir+write a denied path that
            // doesn't exist yet, bypassing the deny rule entirely.
            //
            // bwrap creates empty files on the host as mount points for these binds.
            // We track them in bwrapMountPoints so cleanupBwrapMountPoints() can
            // remove them after the command exits.
            if (!fs.existsSync(normalizedPath)) {
                // Fix 1 (worktree): If any existing component in the deny path is a
                // file (not a directory), skip the deny entirely. You can't mkdir
                // under a file, so the deny path can never be created. This handles
                // git worktrees where .git is a file.
                if (hasFileAncestor(normalizedPath)) {
                    logForDebugging(`[Sandbox Linux] Skipping deny path with file ancestor (cannot create paths under a file): ${normalizedPath}`);
                    continue;
                }
                // Find the deepest existing ancestor directory
                let ancestorPath = path.dirname(normalizedPath);
                while (ancestorPath !== '/' && !fs.existsSync(ancestorPath)) {
                    ancestorPath = path.dirname(ancestorPath);
                }
                // Only protect if the existing ancestor is within an allowed write path.
                // If not, the path is already read-only from --ro-bind / /.
                const ancestorIsWithinAllowedPath = allowedWritePaths.some(allowedPath => ancestorPath.startsWith(allowedPath + '/') ||
                    ancestorPath === allowedPath ||
                    normalizedPath.startsWith(allowedPath + '/'));
                if (ancestorIsWithinAllowedPath) {
                    const firstNonExistent = findFirstNonExistentComponent(normalizedPath);
                    // Fix 2: If firstNonExistent is an intermediate component (not the
                    // leaf deny path itself), mount a read-only empty directory instead
                    // of /dev/null. This prevents the component from appearing as a file
                    // which breaks tools that expect to traverse it as a directory.
                    if (firstNonExistent !== normalizedPath) {
                        const emptyDir = fs.mkdtempSync(path.join(tmpdir(), 'claude-empty-'));
                        args.push('--ro-bind', emptyDir, firstNonExistent);
                        bwrapMountPoints.add(firstNonExistent);
                        registerExitCleanupHandler();
                        logForDebugging(`[Sandbox Linux] Mounted empty dir at ${firstNonExistent} to block creation of ${normalizedPath}`);
                    }
                    else {
                        args.push('--ro-bind', '/dev/null', firstNonExistent);
                        bwrapMountPoints.add(firstNonExistent);
                        registerExitCleanupHandler();
                        logForDebugging(`[Sandbox Linux] Mounted /dev/null at ${firstNonExistent} to block creation of ${normalizedPath}`);
                    }
                }
                else {
                    logForDebugging(`[Sandbox Linux] Skipping non-existent deny path not within allowed paths: ${normalizedPath}`);
                }
                continue;
            }
            // Only add deny binding if this path is within an allowed write path
            // Otherwise it's already read-only from the initial --ro-bind / /
            const isWithinAllowedPath = allowedWritePaths.some(allowedPath => normalizedPath.startsWith(allowedPath + '/') ||
                normalizedPath === allowedPath);
            if (isWithinAllowedPath) {
                args.push('--ro-bind', normalizedPath, normalizedPath);
            }
            else {
                logForDebugging(`[Sandbox Linux] Skipping deny path not within allowed paths: ${normalizedPath}`);
            }
        }
    }
    else {
        // No write restrictions: Allow all writes
        args.push('--bind', '/', '/');
    }
    // Handle read restrictions by mounting tmpfs over denied paths
    const readDenyPaths = [...(readConfig?.denyOnly || [])];
    const readAllowPaths = (readConfig?.allowWithinDeny || []).map(p => normalizePathForSandbox(p));
    // Always hide /etc/ssh/ssh_config.d to avoid permission issues with OrbStack
    // SSH is very strict about config file permissions and ownership, and they can
    // appear wrong inside the sandbox causing "Bad owner or permissions" errors
    if (fs.existsSync('/etc/ssh/ssh_config.d')) {
        readDenyPaths.push('/etc/ssh/ssh_config.d');
    }
    for (const pathPattern of readDenyPaths) {
        const normalizedPath = normalizePathForSandbox(pathPattern);
        if (!fs.existsSync(normalizedPath)) {
            logForDebugging(`[Sandbox Linux] Skipping non-existent read deny path: ${normalizedPath}`);
            continue;
        }
        const readDenyStat = fs.statSync(normalizedPath);
        if (readDenyStat.isDirectory()) {
            // Directories: tmpfs hides the entire subtree
            args.push('--tmpfs', normalizedPath);
            // Re-allow specific paths within the denied directory (allowRead overrides denyRead).
            // After mounting tmpfs over the denied dir, bind back the allowed subdirectories
            // so they are readable again.
            for (const allowPath of readAllowPaths) {
                if (allowPath.startsWith(normalizedPath + '/') ||
                    allowPath === normalizedPath) {
                    if (!fs.existsSync(allowPath)) {
                        logForDebugging(`[Sandbox Linux] Skipping non-existent read allow path: ${allowPath}`);
                        continue;
                    }
                    // Bind the allowed path back over the tmpfs so it's readable
                    args.push('--ro-bind', allowPath, allowPath);
                    logForDebugging(`[Sandbox Linux] Re-allowed read access within denied region: ${allowPath}`);
                }
            }
        }
        else {
            // For files, check if this specific file is re-allowed
            const isReAllowed = readAllowPaths.some(allowPath => normalizedPath === allowPath ||
                normalizedPath.startsWith(allowPath + '/'));
            if (isReAllowed) {
                logForDebugging(`[Sandbox Linux] Skipping read deny for re-allowed path: ${normalizedPath}`);
                continue;
            }
            // For files, bind /dev/null instead of tmpfs
            args.push('--ro-bind', '/dev/null', normalizedPath);
        }
    }
    return args;
}
|
||||
/**
|
||||
* Wrap a command with sandbox restrictions on Linux
|
||||
*
|
||||
* UNIX SOCKET BLOCKING (APPLY-SECCOMP):
|
||||
* This implementation uses a custom apply-seccomp binary to block Unix domain socket
|
||||
* creation for user commands while allowing network infrastructure:
|
||||
*
|
||||
* Stage 1: Outer bwrap - Network and filesystem isolation (NO seccomp)
|
||||
* - Bubblewrap starts with isolated network namespace (--unshare-net)
|
||||
* - Bubblewrap applies PID namespace isolation (--unshare-pid and --proc)
|
||||
* - Filesystem restrictions are applied (read-only mounts, bind mounts, etc.)
|
||||
* - Socat processes start and connect to Unix socket bridges (can use socket(AF_UNIX, ...))
|
||||
*
|
||||
* Stage 2: apply-seccomp - Seccomp filter application (ONLY seccomp)
|
||||
* - apply-seccomp binary applies seccomp filter via prctl(PR_SET_SECCOMP)
|
||||
* - Sets PR_SET_NO_NEW_PRIVS to allow seccomp without root
|
||||
* - Execs user command with seccomp active (cannot create new Unix sockets)
|
||||
*
|
||||
* This solves the conflict between:
|
||||
* - Security: Blocking arbitrary Unix socket creation in user commands
|
||||
* - Functionality: Network sandboxing requires socat to call socket(AF_UNIX, ...) for bridge connections
|
||||
*
|
||||
* The seccomp-bpf filter blocks socket(AF_UNIX, ...) syscalls, preventing:
|
||||
* - Creating new Unix domain socket file descriptors
|
||||
*
|
||||
* Security limitations:
|
||||
* - Does NOT block operations (bind, connect, sendto, etc.) on inherited Unix socket FDs
|
||||
* - Does NOT prevent passing Unix socket FDs via SCM_RIGHTS
|
||||
* - For most sandboxing use cases, blocking socket creation is sufficient
|
||||
*
|
||||
* The filter allows:
|
||||
* - All TCP/UDP sockets (AF_INET, AF_INET6) for normal network operations
|
||||
* - All other syscalls
|
||||
*
|
||||
* PLATFORM NOTE:
|
||||
* The allowUnixSockets configuration is not path-based on Linux (unlike macOS)
|
||||
* because seccomp-bpf cannot inspect user-space memory to read socket paths.
|
||||
*
|
||||
* Requirements for seccomp filtering:
|
||||
* - Pre-built apply-seccomp binaries are included for x64 and ARM64
|
||||
* - Pre-generated BPF filters are included for x64 and ARM64
|
||||
* - Other architectures are not currently supported (no apply-seccomp binary available)
|
||||
* - To use sandboxing without Unix socket blocking on unsupported architectures,
|
||||
* set allowAllUnixSockets: true in your configuration
|
||||
* Dependencies are checked by checkLinuxDependencies() before enabling the sandbox.
|
||||
*/
|
||||
export async function wrapCommandWithSandboxLinux(params) {
|
||||
const { command, needsNetworkRestriction, httpSocketPath, socksSocketPath, httpProxyPort, socksProxyPort, readConfig, writeConfig, enableWeakerNestedSandbox, allowAllUnixSockets, binShell, ripgrepConfig = { command: 'rg' }, mandatoryDenySearchDepth = DEFAULT_MANDATORY_DENY_SEARCH_DEPTH, allowGitConfig = false, seccompConfig, abortSignal, } = params;
|
||||
// Determine if we have restrictions to apply
|
||||
// Read: denyOnly pattern - empty array means no restrictions
|
||||
// Write: allowOnly pattern - undefined means no restrictions, any config means restrictions
|
||||
const hasReadRestrictions = readConfig && readConfig.denyOnly.length > 0;
|
||||
const hasWriteRestrictions = writeConfig !== undefined;
|
||||
// Check if we need any sandboxing
|
||||
if (!needsNetworkRestriction &&
|
||||
!hasReadRestrictions &&
|
||||
!hasWriteRestrictions) {
|
||||
return command;
|
||||
}
|
||||
const bwrapArgs = ['--new-session', '--die-with-parent'];
|
||||
let seccompFilterPath = undefined;
|
||||
try {
|
||||
// ========== SECCOMP FILTER (Unix Socket Blocking) ==========
|
||||
// Use bwrap's --seccomp flag to apply BPF filter that blocks Unix socket creation
|
||||
//
|
||||
// NOTE: Seccomp filtering is only enabled when allowAllUnixSockets is false
|
||||
// (when true, Unix sockets are allowed)
|
||||
if (!allowAllUnixSockets) {
|
||||
seccompFilterPath =
|
||||
generateSeccompFilter(seccompConfig?.bpfPath) ?? undefined;
|
||||
const applySeccompBinary = getApplySeccompBinaryPath(seccompConfig?.applyPath);
|
||||
if (!seccompFilterPath || !applySeccompBinary) {
|
||||
// Seccomp binaries not found - warn but continue without unix socket blocking
|
||||
logForDebugging('[Sandbox Linux] Seccomp binaries not available - unix socket blocking disabled. ' +
|
||||
'Install @anthropic-ai/sandbox-runtime globally for full protection.', { level: 'warn' });
|
||||
// Clear the filter path so we don't try to use it
|
||||
seccompFilterPath = undefined;
|
||||
}
|
||||
else {
|
||||
// Track filter for cleanup and register exit handler
|
||||
// Only track runtime-generated filters (not pre-generated ones from vendor/)
|
||||
if (!seccompFilterPath.includes('/vendor/seccomp/')) {
|
||||
generatedSeccompFilters.add(seccompFilterPath);
|
||||
registerExitCleanupHandler();
|
||||
}
|
||||
logForDebugging('[Sandbox Linux] Generated seccomp BPF filter for Unix socket blocking');
|
||||
}
|
||||
}
|
||||
else {
|
||||
logForDebugging('[Sandbox Linux] Skipping seccomp filter - allowAllUnixSockets is enabled');
|
||||
}
|
||||
// ========== NETWORK RESTRICTIONS ==========
|
||||
if (needsNetworkRestriction) {
|
||||
// Always unshare network namespace to isolate network access
|
||||
// This removes all network interfaces, effectively blocking all network
|
||||
bwrapArgs.push('--unshare-net');
|
||||
// If proxy sockets are provided, bind them into the sandbox to allow
|
||||
// filtered network access through the proxy. If not provided, network
|
||||
// is completely blocked (empty allowedDomains = block all)
|
||||
if (httpSocketPath && socksSocketPath) {
|
||||
// Verify socket files still exist before trying to bind them
|
||||
if (!fs.existsSync(httpSocketPath)) {
|
||||
throw new Error(`Linux HTTP bridge socket does not exist: ${httpSocketPath}. ` +
|
||||
'The bridge process may have died. Try reinitializing the sandbox.');
|
||||
}
|
||||
if (!fs.existsSync(socksSocketPath)) {
|
||||
throw new Error(`Linux SOCKS bridge socket does not exist: ${socksSocketPath}. ` +
|
||||
'The bridge process may have died. Try reinitializing the sandbox.');
|
||||
}
|
||||
// Bind both sockets into the sandbox
|
||||
bwrapArgs.push('--bind', httpSocketPath, httpSocketPath);
|
||||
bwrapArgs.push('--bind', socksSocketPath, socksSocketPath);
|
||||
// Add proxy environment variables
|
||||
// HTTP_PROXY points to the socat listener inside the sandbox (port 3128)
|
||||
// which forwards to the Unix socket that bridges to the host's proxy server
|
||||
const proxyEnv = generateProxyEnvVars(3128, // Internal HTTP listener port
|
||||
1080);
|
||||
bwrapArgs.push(...proxyEnv.flatMap((env) => {
|
||||
const firstEq = env.indexOf('=');
|
||||
const key = env.slice(0, firstEq);
|
||||
const value = env.slice(firstEq + 1);
|
||||
return ['--setenv', key, value];
|
||||
}));
|
||||
// Add host proxy port environment variables for debugging/transparency
|
||||
// These show which host ports the Unix socket bridges connect to
|
||||
if (httpProxyPort !== undefined) {
|
||||
bwrapArgs.push('--setenv', 'CLAUDE_CODE_HOST_HTTP_PROXY_PORT', String(httpProxyPort));
|
||||
}
|
||||
if (socksProxyPort !== undefined) {
|
||||
bwrapArgs.push('--setenv', 'CLAUDE_CODE_HOST_SOCKS_PROXY_PORT', String(socksProxyPort));
|
||||
}
|
||||
}
|
||||
// If no sockets provided, network is completely blocked (--unshare-net without proxy)
|
||||
}
|
||||
// ========== FILESYSTEM RESTRICTIONS ==========
|
||||
const fsArgs = await generateFilesystemArgs(readConfig, writeConfig, ripgrepConfig, mandatoryDenySearchDepth, allowGitConfig, abortSignal);
|
||||
bwrapArgs.push(...fsArgs);
|
||||
// Always bind /dev
|
||||
bwrapArgs.push('--dev', '/dev');
|
||||
// ========== PID NAMESPACE ISOLATION ==========
|
||||
// IMPORTANT: These must come AFTER filesystem binds for nested bwrap to work
|
||||
// By default, always unshare PID namespace and mount fresh /proc.
|
||||
// If we don't have --unshare-pid, it is possible to escape the sandbox.
|
||||
// If we don't have --proc, it is possible to read host /proc and leak information about code running
|
||||
// outside the sandbox. But, --proc is not available when running in unprivileged docker containers
|
||||
// so we support running without it if explicitly requested.
|
||||
bwrapArgs.push('--unshare-pid');
|
||||
if (!enableWeakerNestedSandbox) {
|
||||
// Mount fresh /proc if PID namespace is isolated (secure mode)
|
||||
bwrapArgs.push('--proc', '/proc');
|
||||
}
|
||||
// ========== COMMAND ==========
|
||||
// Use the user's shell (zsh, bash, etc.) to ensure aliases/snapshots work
|
||||
// Resolve the full path to the shell binary since bwrap doesn't use $PATH
|
||||
const shellName = binShell || 'bash';
|
||||
const shell = whichSync(shellName);
|
||||
if (!shell) {
|
||||
throw new Error(`Shell '${shellName}' not found in PATH`);
|
||||
}
|
||||
bwrapArgs.push('--', shell, '-c');
|
||||
// If we have network restrictions, use the network bridge setup with apply-seccomp for seccomp
|
||||
// Otherwise, just run the command directly with apply-seccomp if needed
|
||||
if (needsNetworkRestriction && httpSocketPath && socksSocketPath) {
|
||||
// Pass seccomp filter to buildSandboxCommand for apply-seccomp application
|
||||
// This allows socat to start before seccomp is applied
|
||||
const sandboxCommand = buildSandboxCommand(httpSocketPath, socksSocketPath, command, seccompFilterPath, shell, seccompConfig?.applyPath);
|
||||
bwrapArgs.push(sandboxCommand);
|
||||
}
|
||||
else if (seccompFilterPath) {
|
||||
// No network restrictions but we have seccomp - use apply-seccomp directly
|
||||
// apply-seccomp is a simple C program that applies the seccomp filter and execs the command
|
||||
const applySeccompBinary = getApplySeccompBinaryPath(seccompConfig?.applyPath);
|
||||
if (!applySeccompBinary) {
|
||||
throw new Error('apply-seccomp binary not found. This should have been caught earlier. ' +
|
||||
'Ensure vendor/seccomp/{x64,arm64}/apply-seccomp binaries are included in the package.');
|
||||
}
|
||||
const applySeccompCmd = shellquote.quote([
|
||||
applySeccompBinary,
|
||||
seccompFilterPath,
|
||||
shell,
|
||||
'-c',
|
||||
command,
|
||||
]);
|
||||
bwrapArgs.push(applySeccompCmd);
|
||||
}
|
||||
else {
|
||||
bwrapArgs.push(command);
|
||||
}
|
||||
// Build the outer bwrap command
|
||||
const wrappedCommand = shellquote.quote(['bwrap', ...bwrapArgs]);
|
||||
const restrictions = [];
|
||||
if (needsNetworkRestriction)
|
||||
restrictions.push('network');
|
||||
if (hasReadRestrictions || hasWriteRestrictions)
|
||||
restrictions.push('filesystem');
|
||||
if (seccompFilterPath)
|
||||
restrictions.push('seccomp(unix-block)');
|
||||
logForDebugging(`[Sandbox Linux] Wrapped command with bwrap (${restrictions.join(', ')} restrictions)`);
|
||||
return wrappedCommand;
|
||||
}
|
||||
catch (error) {
|
||||
// Clean up seccomp filter on error
|
||||
if (seccompFilterPath && !seccompFilterPath.includes('/vendor/seccomp/')) {
|
||||
generatedSeccompFilters.delete(seccompFilterPath);
|
||||
try {
|
||||
cleanupSeccompFilter(seccompFilterPath);
|
||||
}
|
||||
catch (cleanupError) {
|
||||
logForDebugging(`[Sandbox Linux] Failed to clean up seccomp filter on error: ${cleanupError}`, { level: 'error' });
|
||||
}
|
||||
}
|
||||
// Re-throw the original error
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=linux-sandbox-utils.js.map
|
||||
630
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/sandbox/macos-sandbox-utils.js
generated
vendored
Normal file
630
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/sandbox/macos-sandbox-utils.js
generated
vendored
Normal file
@@ -0,0 +1,630 @@
|
||||
import shellquote from 'shell-quote';
|
||||
import { spawn } from 'child_process';
|
||||
import * as path from 'path';
|
||||
import { logForDebugging } from '../utils/debug.js';
|
||||
import { whichSync } from '../utils/which.js';
|
||||
import { normalizePathForSandbox, generateProxyEnvVars, encodeSandboxedCommand, decodeSandboxedCommand, containsGlobChars, globToRegex, DANGEROUS_FILES, getDangerousDirectories, } from './sandbox-utils.js';
|
||||
/**
 * Build the deny patterns that are always enforced on macOS, regardless of
 * user configuration. The Seatbelt profile matches these directly via
 * regex/glob (globToRegex), so no filesystem scanning is required.
 * @param {boolean} [allowGitConfig=false] - When true, `.git/config` is not denied.
 * @returns {string[]} Deduplicated deny patterns (absolute CWD paths plus `**` globs).
 */
export function macGetMandatoryDenyPatterns(allowGitConfig = false) {
    const workingDir = process.cwd();
    const patterns = new Set();
    // Sensitive files: block the copy in CWD plus any copy anywhere in the subtree.
    for (const name of DANGEROUS_FILES) {
        patterns.add(path.resolve(workingDir, name));
        patterns.add(`**/${name}`);
    }
    // Sensitive directories: block the directory and everything beneath it.
    for (const dir of getDangerousDirectories()) {
        patterns.add(path.resolve(workingDir, dir));
        patterns.add(`**/${dir}/**`);
    }
    // Git hooks can run arbitrary code on normal git operations — always blocked.
    patterns.add(path.resolve(workingDir, '.git/hooks'));
    patterns.add('**/.git/hooks/**');
    // Git config can redefine behavior (e.g. hooks paths) — blocked unless opted in.
    if (!allowGitConfig) {
        patterns.add(path.resolve(workingDir, '.git/config'));
        patterns.add('**/.git/config');
    }
    return [...patterns];
}
|
||||
// Random per-process suffix appended to every log tag; the macOS `log stream`
// monitor filters on it so only this session's sandbox violations are reported.
const sessionSuffix = `_${Math.random().toString(36).slice(2, 11)}_SBX`;
|
||||
/**
 * Build a unique, greppable tag that is embedded in sandbox deny messages so a
 * violation can be attributed back to the command that triggered it.
 * @param {string} command - Command being executed; base64-encoded into the tag.
 * @returns {string} Tag of the form `CMD64_<encoded>_END_<sessionSuffix>`.
 */
function generateLogTag(command) {
    const parts = ['CMD64_', encodeSandboxedCommand(command), '_END_', sessionSuffix];
    return parts.join('');
}
|
||||
/**
 * Get all ancestor directories of a path, from nearest to farthest, stopping
 * before the filesystem root.
 * Example: /private/tmp/test/file.txt -> ["/private/tmp/test", "/private/tmp", "/private"]
 * @param {string} pathStr - Absolute or relative path.
 * @returns {string[]} Ancestor directories, excluding "/" and ".".
 */
function getAncestorDirectories(pathStr) {
    const ancestors = [];
    for (let dir = path.dirname(pathStr); dir !== '/' && dir !== '.';) {
        ancestors.push(dir);
        const parent = path.dirname(dir);
        // path.dirname returns its input unchanged at the top — stop there.
        if (parent === dir) {
            break;
        }
        dir = parent;
    }
    return ancestors;
}
|
||||
/**
 * Generate Seatbelt deny rules for file movement (file-write-unlink) so that
 * read/write restrictions cannot be bypassed by renaming a protected file, or
 * by moving one of its ancestor directories aside and recreating the tree.
 *
 * @param {string[]} pathPatterns - Path patterns to protect (literal paths or globs).
 * @param {string} logTag - Tag embedded in deny messages for violation attribution.
 * @returns {string[]} Sandbox profile rule lines (each rule spans three lines).
 */
function generateMoveBlockingRules(pathPatterns, logTag) {
    const rules = [];
    for (const pathPattern of pathPatterns) {
        const normalizedPath = normalizePathForSandbox(pathPattern);
        if (containsGlobChars(normalizedPath)) {
            // Use regex matching for glob patterns
            const regexPattern = globToRegex(normalizedPath);
            // Block moving/renaming files matching this pattern
            rules.push(`(deny file-write-unlink`, ` (regex ${escapePath(regexPattern)})`, ` (with message "${logTag}"))`);
            // For glob patterns, extract the static prefix and block ancestor moves
            // Remove glob characters to get the directory prefix
            const staticPrefix = normalizedPath.split(/[*?[\]]/)[0];
            if (staticPrefix && staticPrefix !== '/') {
                // Get the directory containing the glob pattern
                const baseDir = staticPrefix.endsWith('/')
                    ? staticPrefix.slice(0, -1)
                    : path.dirname(staticPrefix);
                // Block moves of the base directory itself
                rules.push(`(deny file-write-unlink`, ` (literal ${escapePath(baseDir)})`, ` (with message "${logTag}"))`);
                // Block moves of ancestor directories
                for (const ancestorDir of getAncestorDirectories(baseDir)) {
                    rules.push(`(deny file-write-unlink`, ` (literal ${escapePath(ancestorDir)})`, ` (with message "${logTag}"))`);
                }
            }
        }
        else {
            // Use subpath matching for literal paths
            // Block moving/renaming the denied path itself
            rules.push(`(deny file-write-unlink`, ` (subpath ${escapePath(normalizedPath)})`, ` (with message "${logTag}"))`);
            // Block moves of ancestor directories
            for (const ancestorDir of getAncestorDirectories(normalizedPath)) {
                rules.push(`(deny file-write-unlink`, ` (literal ${escapePath(ancestorDir)})`, ` (with message "${logTag}"))`);
            }
        }
    }
    return rules;
}
|
||||
/**
 * Generate filesystem read rules for the Seatbelt sandbox profile.
 *
 * Supports two layers:
 *  1. denyOnly: deny reads from these paths (broad regions like /Users)
 *  2. allowWithinDeny: re-allow reads within denied regions (like CWD)
 * allowWithinDeny takes precedence over denyOnly.
 *
 * In Seatbelt profiles, later rules take precedence, so we emit:
 *   (allow file-read*)      <- default: allow everything
 *   (deny file-read* ...)   <- deny broad regions
 *   (allow file-read* ...)  <- re-allow specific paths within denied regions
 *
 * @param {{denyOnly?: string[], allowWithinDeny?: string[]}|undefined} config
 *   Read restriction config; undefined means reads are unrestricted.
 * @param {string} logTag - Tag embedded in deny messages for violation attribution.
 * @returns {string[]} Sandbox profile rule lines.
 */
function generateReadRules(config, logTag) {
    if (!config) {
        return [`(allow file-read*)`];
    }
    // Both lists are optional. The directory-metadata check below previously
    // read `config.denyOnly.length` without a guard and threw a TypeError when
    // denyOnly was undefined, even though every other use here guards with `|| []`.
    const denyOnly = config.denyOnly || [];
    const allowWithinDeny = config.allowWithinDeny || [];
    const rules = [];
    // Start by allowing everything
    rules.push(`(allow file-read*)`);
    // Then deny specific paths
    for (const pathPattern of denyOnly) {
        const normalizedPath = normalizePathForSandbox(pathPattern);
        if (containsGlobChars(normalizedPath)) {
            // Use regex matching for glob patterns
            const regexPattern = globToRegex(normalizedPath);
            rules.push(`(deny file-read*`, ` (regex ${escapePath(regexPattern)})`, ` (with message "${logTag}"))`);
        }
        else {
            // Use subpath matching for literal paths
            rules.push(`(deny file-read*`, ` (subpath ${escapePath(normalizedPath)})`, ` (with message "${logTag}"))`);
        }
    }
    // Re-allow specific paths within denied regions (allowWithinDeny takes precedence)
    for (const pathPattern of allowWithinDeny) {
        const normalizedPath = normalizePathForSandbox(pathPattern);
        if (containsGlobChars(normalizedPath)) {
            const regexPattern = globToRegex(normalizedPath);
            rules.push(`(allow file-read*`, ` (regex ${escapePath(regexPattern)})`, ` (with message "${logTag}"))`);
        }
        else {
            rules.push(`(allow file-read*`, ` (subpath ${escapePath(normalizedPath)})`, ` (with message "${logTag}"))`);
        }
    }
    // Allow stat/lstat on all directories so that realpath() can traverse
    // path components within denied regions. Without this, C realpath() fails
    // when resolving symlinks because it needs to lstat every intermediate
    // directory (e.g. /Users, /Users/chris) even if only a subdirectory like
    // ~/.local is in allowWithinDeny. This only allows metadata reads on
    // directories — not listing contents (readdir) or reading files.
    if (denyOnly.length > 0) {
        rules.push(`(allow file-read-metadata`, ` (vnode-type DIRECTORY))`);
    }
    // Block file movement to prevent bypass via mv/rename
    rules.push(...generateMoveBlockingRules(denyOnly, logTag));
    return rules;
}
|
||||
/**
 * Generate filesystem write rules for the Seatbelt sandbox profile.
 *
 * Write restrictions use an allow-list model: an undefined config means writes
 * are unrestricted; otherwise only `allowOnly` paths (plus the macOS TMPDIR
 * parent) are writable, minus `denyWithinAllow` and the mandatory deny
 * patterns, with move-blocking rules appended so denies cannot be bypassed
 * via mv/rename. Deny rules are emitted after allow rules because later
 * Seatbelt rules take precedence.
 *
 * @param {{allowOnly?: string[], denyWithinAllow?: string[]}|undefined} config
 *   Write restriction config; undefined means writes are unrestricted.
 * @param {string} logTag - Tag embedded in rule messages for violation attribution.
 * @param {boolean} [allowGitConfig=false] - When true, `.git/config` is not force-denied.
 * @returns {string[]} Sandbox profile rule lines.
 */
function generateWriteRules(config, logTag, allowGitConfig = false) {
    if (!config) {
        return [`(allow file-write*)`];
    }
    const rules = [];
    // Automatically allow TMPDIR parent on macOS when write restrictions are enabled
    const tmpdirParents = getTmpdirParentIfMacOSPattern();
    for (const tmpdirParent of tmpdirParents) {
        const normalizedPath = normalizePathForSandbox(tmpdirParent);
        rules.push(`(allow file-write*`, ` (subpath ${escapePath(normalizedPath)})`, ` (with message "${logTag}"))`);
    }
    // Generate allow rules
    for (const pathPattern of config.allowOnly || []) {
        const normalizedPath = normalizePathForSandbox(pathPattern);
        if (containsGlobChars(normalizedPath)) {
            // Use regex matching for glob patterns
            const regexPattern = globToRegex(normalizedPath);
            rules.push(`(allow file-write*`, ` (regex ${escapePath(regexPattern)})`, ` (with message "${logTag}"))`);
        }
        else {
            // Use subpath matching for literal paths
            rules.push(`(allow file-write*`, ` (subpath ${escapePath(normalizedPath)})`, ` (with message "${logTag}"))`);
        }
    }
    // Combine user-specified and mandatory deny patterns (no ripgrep needed on macOS)
    const denyPaths = [
        ...(config.denyWithinAllow || []),
        ...macGetMandatoryDenyPatterns(allowGitConfig),
    ];
    for (const pathPattern of denyPaths) {
        const normalizedPath = normalizePathForSandbox(pathPattern);
        if (containsGlobChars(normalizedPath)) {
            // Use regex matching for glob patterns
            const regexPattern = globToRegex(normalizedPath);
            rules.push(`(deny file-write*`, ` (regex ${escapePath(regexPattern)})`, ` (with message "${logTag}"))`);
        }
        else {
            // Use subpath matching for literal paths
            rules.push(`(deny file-write*`, ` (subpath ${escapePath(normalizedPath)})`, ` (with message "${logTag}"))`);
        }
    }
    // Block file movement to prevent bypass via mv/rename
    rules.push(...generateMoveBlockingRules(denyPaths, logTag));
    return rules;
}
|
||||
/**
 * Generate the complete macOS Seatbelt (sandbox-exec) profile text.
 *
 * The profile starts from `(deny default)` and selectively re-allows the
 * process, Mach IPC, IOKit, sysctl, and device-file operations a typical
 * shell command needs, then appends network rules and the read/write rules
 * derived from the supplied configs. Every deny rule carries `logTag` in its
 * message so the log monitor can attribute violations to this command.
 *
 * @param {object} opts
 * @param {object|undefined} opts.readConfig - Read restrictions (see generateReadRules).
 * @param {object|undefined} opts.writeConfig - Write restrictions (see generateWriteRules).
 * @param {number|undefined} opts.httpProxyPort - Localhost HTTP proxy port to allow.
 * @param {number|undefined} opts.socksProxyPort - Localhost SOCKS proxy port to allow.
 * @param {boolean} opts.needsNetworkRestriction - When false, all network access is allowed.
 * @param {string[]|undefined} opts.allowUnixSockets - Specific Unix socket paths to allow.
 * @param {boolean|undefined} opts.allowAllUnixSockets - Allow every Unix socket path.
 * @param {boolean|undefined} opts.allowLocalBinding - Allow bind/inbound/outbound on local endpoints.
 * @param {boolean|undefined} opts.allowPty - Add pseudo-terminal device rules.
 * @param {boolean} [opts.allowGitConfig=false] - Forwarded to write rules (.git/config handling).
 * @param {boolean} [opts.enableWeakerNetworkIsolation=false] - Also allow trustd.agent Mach lookup.
 * @param {string} opts.logTag - Session/command tag embedded in deny messages.
 * @returns {string} Newline-joined Seatbelt profile text.
 */
function generateSandboxProfile({ readConfig, writeConfig, httpProxyPort, socksProxyPort, needsNetworkRestriction, allowUnixSockets, allowAllUnixSockets, allowLocalBinding, allowPty, allowGitConfig = false, enableWeakerNetworkIsolation = false, logTag, }) {
    const profile = [
        '(version 1)',
        `(deny default (with message "${logTag}"))`,
        '',
        `; LogTag: ${logTag}`,
        '',
        '; Essential permissions - based on Chrome sandbox policy',
        '; Process permissions',
        '(allow process-exec)',
        '(allow process-fork)',
        '(allow process-info* (target same-sandbox))',
        '(allow signal (target same-sandbox))',
        '(allow mach-priv-task-port (target same-sandbox))',
        '',
        '; User preferences',
        '(allow user-preference-read)',
        '',
        '; Mach IPC - specific services only (no wildcard)',
        '(allow mach-lookup',
        ' (global-name "com.apple.audio.systemsoundserver")',
        ' (global-name "com.apple.distributed_notifications@Uv3")',
        ' (global-name "com.apple.FontObjectsServer")',
        ' (global-name "com.apple.fonts")',
        ' (global-name "com.apple.logd")',
        ' (global-name "com.apple.lsd.mapdb")',
        ' (global-name "com.apple.PowerManagement.control")',
        ' (global-name "com.apple.system.logger")',
        ' (global-name "com.apple.system.notification_center")',
        ' (global-name "com.apple.system.opendirectoryd.libinfo")',
        ' (global-name "com.apple.system.opendirectoryd.membership")',
        ' (global-name "com.apple.bsd.dirhelper")',
        ' (global-name "com.apple.securityd.xpc")',
        ' (global-name "com.apple.coreservices.launchservicesd")',
        ')',
        '',
        ...(enableWeakerNetworkIsolation
            ? [
                '; trustd.agent - needed for Go TLS certificate verification (weaker network isolation)',
                '(allow mach-lookup (global-name "com.apple.trustd.agent"))',
            ]
            : []),
        '',
        '; POSIX IPC - shared memory',
        '(allow ipc-posix-shm)',
        '',
        '; POSIX IPC - semaphores for Python multiprocessing',
        '(allow ipc-posix-sem)',
        '',
        '; IOKit - specific operations only',
        '(allow iokit-open',
        ' (iokit-registry-entry-class "IOSurfaceRootUserClient")',
        ' (iokit-registry-entry-class "RootDomainUserClient")',
        ' (iokit-user-client-class "IOSurfaceSendRight")',
        ')',
        '',
        '; IOKit properties',
        '(allow iokit-get-properties)',
        '',
        "; Specific safe system-sockets, doesn't allow network access",
        '(allow system-socket (require-all (socket-domain AF_SYSTEM) (socket-protocol 2)))',
        '',
        '; sysctl - specific sysctls only',
        '(allow sysctl-read',
        ' (sysctl-name "hw.activecpu")',
        ' (sysctl-name "hw.busfrequency_compat")',
        ' (sysctl-name "hw.byteorder")',
        ' (sysctl-name "hw.cacheconfig")',
        ' (sysctl-name "hw.cachelinesize_compat")',
        ' (sysctl-name "hw.cpufamily")',
        ' (sysctl-name "hw.cpufrequency")',
        ' (sysctl-name "hw.cpufrequency_compat")',
        ' (sysctl-name "hw.cputype")',
        ' (sysctl-name "hw.l1dcachesize_compat")',
        ' (sysctl-name "hw.l1icachesize_compat")',
        ' (sysctl-name "hw.l2cachesize_compat")',
        ' (sysctl-name "hw.l3cachesize_compat")',
        ' (sysctl-name "hw.logicalcpu")',
        ' (sysctl-name "hw.logicalcpu_max")',
        ' (sysctl-name "hw.machine")',
        ' (sysctl-name "hw.memsize")',
        ' (sysctl-name "hw.ncpu")',
        ' (sysctl-name "hw.nperflevels")',
        ' (sysctl-name "hw.packages")',
        ' (sysctl-name "hw.pagesize_compat")',
        ' (sysctl-name "hw.pagesize")',
        ' (sysctl-name "hw.physicalcpu")',
        ' (sysctl-name "hw.physicalcpu_max")',
        ' (sysctl-name "hw.tbfrequency_compat")',
        ' (sysctl-name "hw.vectorunit")',
        ' (sysctl-name "kern.argmax")',
        ' (sysctl-name "kern.bootargs")',
        ' (sysctl-name "kern.hostname")',
        ' (sysctl-name "kern.maxfiles")',
        ' (sysctl-name "kern.maxfilesperproc")',
        ' (sysctl-name "kern.maxproc")',
        ' (sysctl-name "kern.ngroups")',
        ' (sysctl-name "kern.osproductversion")',
        ' (sysctl-name "kern.osrelease")',
        ' (sysctl-name "kern.ostype")',
        ' (sysctl-name "kern.osvariant_status")',
        ' (sysctl-name "kern.osversion")',
        ' (sysctl-name "kern.secure_kernel")',
        ' (sysctl-name "kern.tcsm_available")',
        ' (sysctl-name "kern.tcsm_enable")',
        ' (sysctl-name "kern.usrstack64")',
        ' (sysctl-name "kern.version")',
        ' (sysctl-name "kern.willshutdown")',
        ' (sysctl-name "machdep.cpu.brand_string")',
        ' (sysctl-name "machdep.ptrauth_enabled")',
        ' (sysctl-name "security.mac.lockdown_mode_state")',
        ' (sysctl-name "sysctl.proc_cputype")',
        ' (sysctl-name "vm.loadavg")',
        ' (sysctl-name-prefix "hw.optional.arm")',
        ' (sysctl-name-prefix "hw.optional.arm.")',
        ' (sysctl-name-prefix "hw.optional.armv8_")',
        ' (sysctl-name-prefix "hw.perflevel")',
        ' (sysctl-name-prefix "kern.proc.all")',
        ' (sysctl-name-prefix "kern.proc.pgrp.")',
        ' (sysctl-name-prefix "kern.proc.pid.")',
        ' (sysctl-name-prefix "machdep.cpu.")',
        ' (sysctl-name-prefix "net.routetable.")',
        ')',
        '',
        '; V8 thread calculations',
        '(allow sysctl-write',
        ' (sysctl-name "kern.tcsm_enable")',
        ')',
        '',
        '; Distributed notifications',
        '(allow distributed-notification-post)',
        '',
        '; Specific mach-lookup permissions for security operations',
        '(allow mach-lookup (global-name "com.apple.SecurityServer"))',
        '',
        '; File I/O on device files',
        '(allow file-ioctl (literal "/dev/null"))',
        '(allow file-ioctl (literal "/dev/zero"))',
        '(allow file-ioctl (literal "/dev/random"))',
        '(allow file-ioctl (literal "/dev/urandom"))',
        '(allow file-ioctl (literal "/dev/dtracehelper"))',
        '(allow file-ioctl (literal "/dev/tty"))',
        '',
        '(allow file-ioctl file-read-data file-write-data',
        ' (require-all',
        ' (literal "/dev/null")',
        ' (vnode-type CHARACTER-DEVICE)',
        ' )',
        ')',
        '',
    ];
    // Network rules
    profile.push('; Network');
    if (!needsNetworkRestriction) {
        profile.push('(allow network*)');
    }
    else {
        // Allow local binding if requested
        // Use "*:*" instead of "localhost:*" because modern runtimes (Java, etc.) create
        // IPv6 dual-stack sockets by default. When binding such a socket to 127.0.0.1,
        // the kernel represents it as ::ffff:127.0.0.1 (IPv4-mapped IPv6). Seatbelt's
        // "localhost" filter only matches 127.0.0.1 and ::1, NOT ::ffff:127.0.0.1.
        // Using (local ip "*:*") is safe because it only matches the LOCAL endpoint —
        // internet-bound connections originate from non-loopback interfaces, so they
        // remain blocked by (deny default).
        if (allowLocalBinding) {
            profile.push('(allow network-bind (local ip "*:*"))');
            profile.push('(allow network-inbound (local ip "*:*"))');
            profile.push('(allow network-outbound (local ip "*:*"))');
        }
        // Unix domain sockets for local IPC (SSH agent, Docker, Gradle, etc.)
        // Three separate operations must be allowed:
        // 1. system-socket: socket(AF_UNIX, ...) syscall — creates the socket fd (no path context)
        // 2. network-bind: bind() to a local Unix socket path
        // 3. network-outbound: connect() to a remote Unix socket path
        // Note: (subpath ...) and (path-regex ...) are path-based filters that can only match
        // bind/connect operations — socket() creation has no path, so it requires system-socket.
        if (allowAllUnixSockets) {
            // Allow creating AF_UNIX sockets and all Unix socket paths
            profile.push('(allow system-socket (socket-domain AF_UNIX))');
            profile.push('(allow network-bind (local unix-socket (path-regex #"^/")))');
            profile.push('(allow network-outbound (remote unix-socket (path-regex #"^/")))');
        }
        else if (allowUnixSockets && allowUnixSockets.length > 0) {
            // Allow creating AF_UNIX sockets (required for any Unix socket use)
            profile.push('(allow system-socket (socket-domain AF_UNIX))');
            // Allow specific Unix socket paths
            for (const socketPath of allowUnixSockets) {
                const normalizedPath = normalizePathForSandbox(socketPath);
                profile.push(`(allow network-bind (local unix-socket (subpath ${escapePath(normalizedPath)})))`);
                profile.push(`(allow network-outbound (remote unix-socket (subpath ${escapePath(normalizedPath)})))`);
            }
        }
        // If both allowAllUnixSockets and allowUnixSockets are false/undefined/empty, Unix sockets are blocked by default
        // Allow localhost TCP operations for the HTTP proxy
        if (httpProxyPort !== undefined) {
            profile.push(`(allow network-bind (local ip "localhost:${httpProxyPort}"))`);
            profile.push(`(allow network-inbound (local ip "localhost:${httpProxyPort}"))`);
            profile.push(`(allow network-outbound (remote ip "localhost:${httpProxyPort}"))`);
        }
        // Allow localhost TCP operations for the SOCKS proxy
        if (socksProxyPort !== undefined) {
            profile.push(`(allow network-bind (local ip "localhost:${socksProxyPort}"))`);
            profile.push(`(allow network-inbound (local ip "localhost:${socksProxyPort}"))`);
            profile.push(`(allow network-outbound (remote ip "localhost:${socksProxyPort}"))`);
        }
    }
    profile.push('');
    // Read rules
    profile.push('; File read');
    profile.push(...generateReadRules(readConfig, logTag));
    profile.push('');
    // Write rules
    profile.push('; File write');
    profile.push(...generateWriteRules(writeConfig, logTag, allowGitConfig));
    // Pseudo-terminal (pty) support
    if (allowPty) {
        profile.push('');
        profile.push('; Pseudo-terminal (pty) support');
        profile.push('(allow pseudo-tty)');
        profile.push('(allow file-ioctl');
        profile.push(' (literal "/dev/ptmx")');
        profile.push(' (regex #"^/dev/ttys")');
        profile.push(')');
        profile.push('(allow file-read* file-write*');
        profile.push(' (literal "/dev/ptmx")');
        profile.push(' (regex #"^/dev/ttys")');
        profile.push(')');
    }
    return profile.join('\n');
}
|
||||
/**
 * Escape a path (or regex source) for embedding in a Seatbelt profile.
 * JSON.stringify yields a double-quoted string with backslashes, quotes,
 * and control characters escaped — exactly the quoting the profile expects.
 * @param {string} pathStr - Raw path or pattern.
 * @returns {string} Quoted, escaped string.
 */
function escapePath(pathStr) {
    const quoted = JSON.stringify(pathStr);
    return quoted;
}
|
||||
/**
 * Detect the parent of TMPDIR when it matches the macOS per-user pattern
 * /var/folders/XX/YYY/T/ and return it in both /var/... and /private/var/...
 * forms, since /var is a symlink to /private/var.
 * @returns {string[]} Zero, one, or two equivalent parent directory paths.
 */
function getTmpdirParentIfMacOSPattern() {
    const tmpdir = process.env.TMPDIR;
    // Only act on the canonical macOS confstr(_CS_DARWIN_USER_TEMP_DIR) shape.
    if (!tmpdir || !/^\/(private\/)?var\/folders\/[^/]{2}\/[^/]+\/T\/?$/.test(tmpdir)) {
        return [];
    }
    const parent = tmpdir.replace(/\/T\/?$/, '');
    // Emit both symlink aliases so rules match however the path is resolved.
    if (parent.startsWith('/private/var/')) {
        return [parent, parent.replace('/private', '')];
    }
    if (parent.startsWith('/var/')) {
        return [parent, '/private' + parent];
    }
    return [parent];
}
|
||||
/**
 * Wrap a shell command with macOS sandbox-exec so it runs under a generated
 * Seatbelt profile. Returns the command unchanged when no network, read, or
 * write restrictions apply.
 * @param {object} params - Command plus restriction/proxy/shell options (see destructuring).
 * @returns {string} Shell-quoted `env ... sandbox-exec -p <profile> <shell> -c <command>`,
 *   or the original command when unsandboxed.
 * @throws {Error} If the requested shell binary cannot be resolved on PATH.
 */
export function wrapCommandWithSandboxMacOS(params) {
    const { command, needsNetworkRestriction, httpProxyPort, socksProxyPort, allowUnixSockets, allowAllUnixSockets, allowLocalBinding, readConfig, writeConfig, allowPty, allowGitConfig = false, enableWeakerNetworkIsolation = false, binShell, } = params;
    // Determine if we have restrictions to apply
    // Read: denyOnly pattern - empty array means no restrictions
    // Write: allowOnly pattern - undefined means no restrictions, any config means restrictions
    // NOTE(review): this assumes readConfig.denyOnly is always present when readConfig
    // is set; generateReadRules treats it as optional — confirm against callers.
    const hasReadRestrictions = readConfig && readConfig.denyOnly.length > 0;
    const hasWriteRestrictions = writeConfig !== undefined;
    // No sandboxing needed
    if (!needsNetworkRestriction &&
        !hasReadRestrictions &&
        !hasWriteRestrictions) {
        return command;
    }
    const logTag = generateLogTag(command);
    const profile = generateSandboxProfile({
        readConfig,
        writeConfig,
        httpProxyPort,
        socksProxyPort,
        needsNetworkRestriction,
        allowUnixSockets,
        allowAllUnixSockets,
        allowLocalBinding,
        allowPty,
        allowGitConfig,
        enableWeakerNetworkIsolation,
        logTag,
    });
    // Generate proxy environment variables using shared utility
    const proxyEnvArgs = generateProxyEnvVars(httpProxyPort, socksProxyPort);
    // Use the user's shell (zsh, bash, etc.) to ensure aliases/snapshots work
    // Resolve the full path to the shell binary
    const shellName = binShell || 'bash';
    const shell = whichSync(shellName);
    if (!shell) {
        throw new Error(`Shell '${shellName}' not found in PATH`);
    }
    // Use `env` command to set environment variables - each VAR=value is a separate
    // argument that shellquote handles properly, avoiding shell quoting issues
    const wrappedCommand = shellquote.quote([
        'env',
        ...proxyEnvArgs,
        'sandbox-exec',
        '-p',
        profile,
        shell,
        '-c',
        command,
    ]);
    // NOTE(review): the log below probes 'allowAllExcept' on the configs, but the
    // configs consumed above expose denyOnly/allowOnly-shaped fields, so the label
    // may always read 'denyAllExcept' — confirm the intended config shape.
    logForDebugging(`[Sandbox macOS] Applied restrictions - network: ${!!(httpProxyPort || socksProxyPort)}, read: ${readConfig
        ? 'allowAllExcept' in readConfig
            ? 'allowAllExcept'
            : 'denyAllExcept'
        : 'none'}, write: ${writeConfig
        ? 'allowAllExcept' in writeConfig
            ? 'allowAllExcept'
            : 'denyAllExcept'
        : 'none'}`);
    return wrappedCommand;
}
|
||||
/**
 * Start monitoring macOS unified logs (`log stream`) for sandbox violations
 * whose messages end with this process's session suffix, decoding the
 * offending command from the CMD64_... tag when present.
 * @param {(violation: {line: string, command?: string, encodedCommand?: string, timestamp: Date}) => void} callback
 *   Invoked for each violation that is not filtered out.
 * @param {Record<string, string[]>|undefined} ignoreViolations
 *   Map of command-substring -> violation substrings to ignore; the key '*'
 *   applies to every command.
 * @returns {() => void} Stop function that SIGTERMs the underlying `log` process.
 */
export function startMacOSSandboxLogMonitor(callback, ignoreViolations) {
    // Pre-compile regex patterns for better performance
    const cmdExtractRegex = /CMD64_(.+?)_END/;
    const sandboxExtractRegex = /Sandbox:\s+(.+)$/;
    // Pre-process ignore patterns for faster lookup
    const wildcardPaths = ignoreViolations?.['*'] || [];
    const commandPatterns = ignoreViolations
        ? Object.entries(ignoreViolations).filter(([pattern]) => pattern !== '*')
        : [];
    // Stream and filter kernel logs for all sandbox violations
    // We can't filter by specific logTag since it's dynamic per command
    const logProcess = spawn('log', [
        'stream',
        '--predicate',
        `(eventMessage ENDSWITH "${sessionSuffix}")`,
        '--style',
        'compact',
    ]);
    // NOTE(review): 'data' events deliver raw chunks, and only the first matching
    // violation line per chunk is handled — a line split across chunks or multiple
    // violations in one chunk may be dropped. Confirm this is acceptable.
    logProcess.stdout?.on('data', (data) => {
        const lines = data.toString().split('\n');
        // Get violation and command lines
        const violationLine = lines.find(line => line.includes('Sandbox:') && line.includes('deny'));
        const commandLine = lines.find(line => line.startsWith('CMD64_'));
        if (!violationLine)
            return;
        // Extract violation details
        const sandboxMatch = violationLine.match(sandboxExtractRegex);
        if (!sandboxMatch?.[1])
            return;
        const violationDetails = sandboxMatch[1];
        // Try to get command
        let command;
        let encodedCommand;
        if (commandLine) {
            const cmdMatch = commandLine.match(cmdExtractRegex);
            encodedCommand = cmdMatch?.[1];
            if (encodedCommand) {
                try {
                    command = decodeSandboxedCommand(encodedCommand);
                }
                catch {
                    // Failed to decode, continue without command
                }
            }
        }
        // Always filter out noisy violations
        if (violationDetails.includes('mDNSResponder') ||
            violationDetails.includes('mach-lookup com.apple.diagnosticd') ||
            violationDetails.includes('mach-lookup com.apple.analyticsd')) {
            return;
        }
        // Check if we should ignore this violation
        if (ignoreViolations && command) {
            // Check wildcard patterns first
            if (wildcardPaths.length > 0) {
                const shouldIgnore = wildcardPaths.some(path => violationDetails.includes(path));
                if (shouldIgnore)
                    return;
            }
            // Check command-specific patterns
            for (const [pattern, paths] of commandPatterns) {
                if (command.includes(pattern)) {
                    const shouldIgnore = paths.some(path => violationDetails.includes(path));
                    if (shouldIgnore)
                        return;
                }
            }
        }
        // Not ignored - report the violation
        callback({
            line: violationDetails,
            command,
            encodedCommand,
            timestamp: new Date(), // We could parse the timestamp from the log but this feels more reliable
        });
    });
    logProcess.stderr?.on('data', (data) => {
        logForDebugging(`[Sandbox Monitor] Log stream stderr: ${data.toString()}`);
    });
    logProcess.on('error', (error) => {
        logForDebugging(`[Sandbox Monitor] Failed to start log stream: ${error.message}`);
    });
    logProcess.on('exit', (code) => {
        logForDebugging(`[Sandbox Monitor] Log stream exited with code: ${code}`);
    });
    return () => {
        logForDebugging('[Sandbox Monitor] Stopping log monitor');
        logProcess.kill('SIGTERM');
    };
}
|
||||
//# sourceMappingURL=macos-sandbox-utils.js.map
|
||||
180
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/sandbox/sandbox-config.js
generated
vendored
Normal file
180
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/sandbox/sandbox-config.js
generated
vendored
Normal file
@@ -0,0 +1,180 @@
|
||||
/**
|
||||
* Configuration for Sandbox Runtime
|
||||
* This is the main configuration interface that consumers pass to SandboxManager.initialize()
|
||||
*/
|
||||
import { z } from 'zod';
|
||||
/**
|
||||
* Schema for domain patterns (e.g., "example.com", "*.npmjs.org")
|
||||
* Validates that domain patterns are safe and don't include overly broad wildcards
|
||||
*/
|
||||
const domainPatternSchema = z.string().refine(val => {
|
||||
// Reject protocols, paths, ports, etc.
|
||||
if (val.includes('://') || val.includes('/') || val.includes(':')) {
|
||||
return false;
|
||||
}
|
||||
// Allow localhost
|
||||
if (val === 'localhost')
|
||||
return true;
|
||||
// Allow wildcard domains like *.example.com
|
||||
if (val.startsWith('*.')) {
|
||||
const domain = val.slice(2);
|
||||
// After the *. there must be a valid domain with at least one more dot
|
||||
// e.g., *.example.com is valid, *.com is not (too broad)
|
||||
if (!domain.includes('.') ||
|
||||
domain.startsWith('.') ||
|
||||
domain.endsWith('.')) {
|
||||
return false;
|
||||
}
|
||||
// Count dots - must have at least 2 parts after the wildcard (e.g., example.com)
|
||||
const parts = domain.split('.');
|
||||
return parts.length >= 2 && parts.every(p => p.length > 0);
|
||||
}
|
||||
// Reject any other use of wildcards (e.g., *, *., etc.)
|
||||
if (val.includes('*')) {
|
||||
return false;
|
||||
}
|
||||
// Regular domains must have at least one dot and only valid characters
|
||||
return val.includes('.') && !val.startsWith('.') && !val.endsWith('.');
|
||||
}, {
|
||||
message: 'Invalid domain pattern. Must be a valid domain (e.g., "example.com") or wildcard (e.g., "*.example.com"). Overly broad patterns like "*.com" or "*" are not allowed for security reasons.',
|
||||
});
|
||||
/**
|
||||
* Schema for filesystem paths
|
||||
*/
|
||||
const filesystemPathSchema = z.string().min(1, 'Path cannot be empty');
|
||||
/**
|
||||
* Schema for MITM proxy configuration
|
||||
* Allows routing specific domains through an upstream MITM proxy via Unix socket
|
||||
*/
|
||||
const MitmProxyConfigSchema = z.object({
|
||||
socketPath: z.string().min(1).describe('Unix socket path to the MITM proxy'),
|
||||
domains: z
|
||||
.array(domainPatternSchema)
|
||||
.min(1)
|
||||
.describe('Domains to route through the MITM proxy (e.g., ["api.example.com", "*.internal.org"])'),
|
||||
});
|
||||
/**
|
||||
* Network configuration schema for validation
|
||||
*/
|
||||
export const NetworkConfigSchema = z.object({
|
||||
allowedDomains: z
|
||||
.array(domainPatternSchema)
|
||||
.describe('List of allowed domains (e.g., ["github.com", "*.npmjs.org"])'),
|
||||
deniedDomains: z
|
||||
.array(domainPatternSchema)
|
||||
.describe('List of denied domains'),
|
||||
allowUnixSockets: z
|
||||
.array(z.string())
|
||||
.optional()
|
||||
.describe('macOS only: Unix socket paths to allow. Ignored on Linux (seccomp cannot filter by path).'),
|
||||
allowAllUnixSockets: z
|
||||
.boolean()
|
||||
.optional()
|
||||
.describe('If true, allow all Unix sockets (disables blocking on both platforms).'),
|
||||
allowLocalBinding: z
|
||||
.boolean()
|
||||
.optional()
|
||||
.describe('Whether to allow binding to local ports (default: false)'),
|
||||
httpProxyPort: z
|
||||
.number()
|
||||
.int()
|
||||
.min(1)
|
||||
.max(65535)
|
||||
.optional()
|
||||
.describe('Port of an external HTTP proxy to use instead of starting a local one. When provided, the library will skip starting its own HTTP proxy and use this port. The external proxy must handle domain filtering.'),
|
||||
socksProxyPort: z
|
||||
.number()
|
||||
.int()
|
||||
.min(1)
|
||||
.max(65535)
|
||||
.optional()
|
||||
.describe('Port of an external SOCKS proxy to use instead of starting a local one. When provided, the library will skip starting its own SOCKS proxy and use this port. The external proxy must handle domain filtering.'),
|
||||
mitmProxy: MitmProxyConfigSchema.optional().describe('Optional MITM proxy configuration. Routes matching domains through an upstream proxy via Unix socket while SRT still handles allow/deny filtering.'),
|
||||
});
|
||||
/**
|
||||
* Filesystem configuration schema for validation
|
||||
*/
|
||||
export const FilesystemConfigSchema = z.object({
|
||||
denyRead: z.array(filesystemPathSchema).describe('Paths denied for reading'),
|
||||
allowRead: z
|
||||
.array(filesystemPathSchema)
|
||||
.optional()
|
||||
.describe('Paths to re-allow reading within denied regions (takes precedence over denyRead). ' +
|
||||
'Use with denyRead to deny a broad region then allow back specific subdirectories.'),
|
||||
allowWrite: z
|
||||
.array(filesystemPathSchema)
|
||||
.describe('Paths allowed for writing'),
|
||||
denyWrite: z
|
||||
.array(filesystemPathSchema)
|
||||
.describe('Paths denied for writing (takes precedence over allowWrite)'),
|
||||
allowGitConfig: z
|
||||
.boolean()
|
||||
.optional()
|
||||
.describe('Allow writes to .git/config files (default: false). Enables git remote URL updates while keeping .git/hooks protected.'),
|
||||
});
|
||||
/**
|
||||
* Configuration schema for ignoring specific sandbox violations
|
||||
* Maps command patterns to filesystem paths to ignore violations for.
|
||||
*/
|
||||
export const IgnoreViolationsConfigSchema = z
|
||||
.record(z.string(), z.array(z.string()))
|
||||
.describe('Map of command patterns to filesystem paths to ignore violations for. Use "*" to match all commands');
|
||||
/**
|
||||
* Ripgrep configuration schema
|
||||
*/
|
||||
export const RipgrepConfigSchema = z.object({
|
||||
command: z.string().describe('The ripgrep command to execute'),
|
||||
args: z
|
||||
.array(z.string())
|
||||
.optional()
|
||||
.describe('Additional arguments to pass before ripgrep args'),
|
||||
argv0: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe('Override argv[0] when spawning (for multicall binaries that dispatch on argv[0])'),
|
||||
});
|
||||
/**
|
||||
* Seccomp configuration schema (Linux only)
|
||||
* Allows specifying custom paths to seccomp binaries
|
||||
*/
|
||||
export const SeccompConfigSchema = z.object({
|
||||
bpfPath: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe('Path to the unix-block.bpf filter file'),
|
||||
applyPath: z.string().optional().describe('Path to the apply-seccomp binary'),
|
||||
});
|
||||
/**
|
||||
* Main configuration schema for Sandbox Runtime validation
|
||||
*/
|
||||
export const SandboxRuntimeConfigSchema = z.object({
|
||||
network: NetworkConfigSchema.describe('Network restrictions configuration'),
|
||||
filesystem: FilesystemConfigSchema.describe('Filesystem restrictions configuration'),
|
||||
ignoreViolations: IgnoreViolationsConfigSchema.optional().describe('Optional configuration for ignoring specific violations'),
|
||||
enableWeakerNestedSandbox: z
|
||||
.boolean()
|
||||
.optional()
|
||||
.describe('Enable weaker nested sandbox mode (for Docker environments)'),
|
||||
enableWeakerNetworkIsolation: z
|
||||
.boolean()
|
||||
.optional()
|
||||
.describe('Enable weaker network isolation to allow access to com.apple.trustd.agent (macOS only). ' +
|
||||
'This is needed for Go programs (gh, gcloud, terraform, kubectl, etc.) to verify TLS certificates ' +
|
||||
'when using httpProxyPort with a MITM proxy and custom CA. Enabling this opens a potential data ' +
|
||||
'exfiltration vector through the trustd service. Only enable if you need Go TLS verification.'),
|
||||
ripgrep: RipgrepConfigSchema.optional().describe('Custom ripgrep configuration (default: { command: "rg" })'),
|
||||
mandatoryDenySearchDepth: z
|
||||
.number()
|
||||
.int()
|
||||
.min(1)
|
||||
.max(10)
|
||||
.optional()
|
||||
.describe('Maximum directory depth to search for dangerous files on Linux (default: 3). ' +
|
||||
'Higher values provide more protection but slower performance.'),
|
||||
allowPty: z
|
||||
.boolean()
|
||||
.optional()
|
||||
.describe('Allow pseudo-terminal (pty) operations (macOS only)'),
|
||||
seccomp: SeccompConfigSchema.optional().describe('Custom seccomp binary paths (Linux only).'),
|
||||
});
|
||||
//# sourceMappingURL=sandbox-config.js.map
|
||||
786
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/sandbox/sandbox-manager.js
generated
vendored
Normal file
786
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/sandbox/sandbox-manager.js
generated
vendored
Normal file
@@ -0,0 +1,786 @@
|
||||
import { createHttpProxyServer } from './http-proxy.js';
|
||||
import { createSocksProxyServer } from './socks-proxy.js';
|
||||
import { logForDebugging } from '../utils/debug.js';
|
||||
import { whichSync } from '../utils/which.js';
|
||||
import { cloneDeep } from 'lodash-es';
|
||||
import { getPlatform, getWslVersion } from '../utils/platform.js';
|
||||
import * as fs from 'fs';
|
||||
import { wrapCommandWithSandboxLinux, initializeLinuxNetworkBridge, checkLinuxDependencies, cleanupBwrapMountPoints, } from './linux-sandbox-utils.js';
|
||||
import { wrapCommandWithSandboxMacOS, startMacOSSandboxLogMonitor, } from './macos-sandbox-utils.js';
|
||||
import { getDefaultWritePaths, containsGlobChars, removeTrailingGlobSuffix, expandGlobPattern, } from './sandbox-utils.js';
|
||||
import { SandboxViolationStore } from './sandbox-violation-store.js';
|
||||
import { EOL } from 'node:os';
|
||||
// ============================================================================
|
||||
// Private Module State
|
||||
// ============================================================================
|
||||
let config;
|
||||
let httpProxyServer;
|
||||
let socksProxyServer;
|
||||
let managerContext;
|
||||
let initializationPromise;
|
||||
let cleanupRegistered = false;
|
||||
let logMonitorShutdown;
|
||||
const sandboxViolationStore = new SandboxViolationStore();
|
||||
// ============================================================================
|
||||
// Private Helper Functions (not exported)
|
||||
// ============================================================================
|
||||
function registerCleanup() {
|
||||
if (cleanupRegistered) {
|
||||
return;
|
||||
}
|
||||
const cleanupHandler = () => reset().catch(e => {
|
||||
logForDebugging(`Cleanup failed in registerCleanup ${e}`, {
|
||||
level: 'error',
|
||||
});
|
||||
});
|
||||
process.once('exit', cleanupHandler);
|
||||
process.once('SIGINT', cleanupHandler);
|
||||
process.once('SIGTERM', cleanupHandler);
|
||||
cleanupRegistered = true;
|
||||
}
|
||||
function matchesDomainPattern(hostname, pattern) {
|
||||
// Support wildcard patterns like *.example.com
|
||||
// This matches any subdomain but not the base domain itself
|
||||
if (pattern.startsWith('*.')) {
|
||||
const baseDomain = pattern.substring(2); // Remove '*.'
|
||||
return hostname.toLowerCase().endsWith('.' + baseDomain.toLowerCase());
|
||||
}
|
||||
// Exact match for non-wildcard patterns
|
||||
return hostname.toLowerCase() === pattern.toLowerCase();
|
||||
}
|
||||
async function filterNetworkRequest(port, host, sandboxAskCallback) {
|
||||
if (!config) {
|
||||
logForDebugging('No config available, denying network request');
|
||||
return false;
|
||||
}
|
||||
// Check denied domains first
|
||||
for (const deniedDomain of config.network.deniedDomains) {
|
||||
if (matchesDomainPattern(host, deniedDomain)) {
|
||||
logForDebugging(`Denied by config rule: ${host}:${port}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
// Check allowed domains
|
||||
for (const allowedDomain of config.network.allowedDomains) {
|
||||
if (matchesDomainPattern(host, allowedDomain)) {
|
||||
logForDebugging(`Allowed by config rule: ${host}:${port}`);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
// No matching rules - ask user or deny
|
||||
if (!sandboxAskCallback) {
|
||||
logForDebugging(`No matching config rule, denying: ${host}:${port}`);
|
||||
return false;
|
||||
}
|
||||
logForDebugging(`No matching config rule, asking user: ${host}:${port}`);
|
||||
try {
|
||||
const userAllowed = await sandboxAskCallback({ host, port });
|
||||
if (userAllowed) {
|
||||
logForDebugging(`User allowed: ${host}:${port}`);
|
||||
return true;
|
||||
}
|
||||
else {
|
||||
logForDebugging(`User denied: ${host}:${port}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
logForDebugging(`Error in permission callback: ${error}`, {
|
||||
level: 'error',
|
||||
});
|
||||
return false;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Get the MITM proxy socket path for a given host, if configured.
|
||||
* Returns the socket path if the host matches any MITM domain pattern,
|
||||
* otherwise returns undefined.
|
||||
*/
|
||||
function getMitmSocketPath(host) {
|
||||
if (!config?.network.mitmProxy) {
|
||||
return undefined;
|
||||
}
|
||||
const { socketPath, domains } = config.network.mitmProxy;
|
||||
for (const pattern of domains) {
|
||||
if (matchesDomainPattern(host, pattern)) {
|
||||
logForDebugging(`Host ${host} matches MITM pattern ${pattern}`);
|
||||
return socketPath;
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
async function startHttpProxyServer(sandboxAskCallback) {
|
||||
httpProxyServer = createHttpProxyServer({
|
||||
filter: (port, host) => filterNetworkRequest(port, host, sandboxAskCallback),
|
||||
getMitmSocketPath,
|
||||
});
|
||||
return new Promise((resolve, reject) => {
|
||||
if (!httpProxyServer) {
|
||||
reject(new Error('HTTP proxy server undefined before listen'));
|
||||
return;
|
||||
}
|
||||
const server = httpProxyServer;
|
||||
server.once('error', reject);
|
||||
server.once('listening', () => {
|
||||
const address = server.address();
|
||||
if (address && typeof address === 'object') {
|
||||
server.unref();
|
||||
logForDebugging(`HTTP proxy listening on localhost:${address.port}`);
|
||||
resolve(address.port);
|
||||
}
|
||||
else {
|
||||
reject(new Error('Failed to get proxy server address'));
|
||||
}
|
||||
});
|
||||
server.listen(0, '127.0.0.1');
|
||||
});
|
||||
}
|
||||
async function startSocksProxyServer(sandboxAskCallback) {
|
||||
socksProxyServer = createSocksProxyServer({
|
||||
filter: (port, host) => filterNetworkRequest(port, host, sandboxAskCallback),
|
||||
});
|
||||
return new Promise((resolve, reject) => {
|
||||
if (!socksProxyServer) {
|
||||
// This is mostly just for the typechecker
|
||||
reject(new Error('SOCKS proxy server undefined before listen'));
|
||||
return;
|
||||
}
|
||||
socksProxyServer
|
||||
.listen(0, '127.0.0.1')
|
||||
.then((port) => {
|
||||
socksProxyServer?.unref();
|
||||
resolve(port);
|
||||
})
|
||||
.catch(reject);
|
||||
});
|
||||
}
|
||||
// ============================================================================
|
||||
// Public Module Functions (will be exported via namespace)
|
||||
// ============================================================================
|
||||
async function initialize(runtimeConfig, sandboxAskCallback, enableLogMonitor = false) {
|
||||
// Return if already initializing
|
||||
if (initializationPromise) {
|
||||
await initializationPromise;
|
||||
return;
|
||||
}
|
||||
// Store config for use by other functions
|
||||
config = runtimeConfig;
|
||||
// Check dependencies
|
||||
const deps = checkDependencies();
|
||||
if (deps.errors.length > 0) {
|
||||
throw new Error(`Sandbox dependencies not available: ${deps.errors.join(', ')}`);
|
||||
}
|
||||
// Start log monitor for macOS if enabled
|
||||
if (enableLogMonitor && getPlatform() === 'macos') {
|
||||
logMonitorShutdown = startMacOSSandboxLogMonitor(sandboxViolationStore.addViolation.bind(sandboxViolationStore), config.ignoreViolations);
|
||||
logForDebugging('Started macOS sandbox log monitor');
|
||||
}
|
||||
// Register cleanup handlers first time
|
||||
registerCleanup();
|
||||
// Initialize network infrastructure
|
||||
initializationPromise = (async () => {
|
||||
try {
|
||||
// Conditionally start proxy servers based on config
|
||||
let httpProxyPort;
|
||||
if (config.network.httpProxyPort !== undefined) {
|
||||
// Use external HTTP proxy (don't start a server)
|
||||
httpProxyPort = config.network.httpProxyPort;
|
||||
logForDebugging(`Using external HTTP proxy on port ${httpProxyPort}`);
|
||||
}
|
||||
else {
|
||||
// Start local HTTP proxy
|
||||
httpProxyPort = await startHttpProxyServer(sandboxAskCallback);
|
||||
}
|
||||
let socksProxyPort;
|
||||
if (config.network.socksProxyPort !== undefined) {
|
||||
// Use external SOCKS proxy (don't start a server)
|
||||
socksProxyPort = config.network.socksProxyPort;
|
||||
logForDebugging(`Using external SOCKS proxy on port ${socksProxyPort}`);
|
||||
}
|
||||
else {
|
||||
// Start local SOCKS proxy
|
||||
socksProxyPort = await startSocksProxyServer(sandboxAskCallback);
|
||||
}
|
||||
// Initialize platform-specific infrastructure
|
||||
let linuxBridge;
|
||||
if (getPlatform() === 'linux') {
|
||||
linuxBridge = await initializeLinuxNetworkBridge(httpProxyPort, socksProxyPort);
|
||||
}
|
||||
const context = {
|
||||
httpProxyPort,
|
||||
socksProxyPort,
|
||||
linuxBridge,
|
||||
};
|
||||
managerContext = context;
|
||||
logForDebugging('Network infrastructure initialized');
|
||||
return context;
|
||||
}
|
||||
catch (error) {
|
||||
// Clear state on error so initialization can be retried
|
||||
initializationPromise = undefined;
|
||||
managerContext = undefined;
|
||||
reset().catch(e => {
|
||||
logForDebugging(`Cleanup failed in initializationPromise ${e}`, {
|
||||
level: 'error',
|
||||
});
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
})();
|
||||
await initializationPromise;
|
||||
}
|
||||
function isSupportedPlatform() {
|
||||
const platform = getPlatform();
|
||||
if (platform === 'linux') {
|
||||
// WSL1 doesn't support bubblewrap
|
||||
return getWslVersion() !== '1';
|
||||
}
|
||||
return platform === 'macos';
|
||||
}
|
||||
function isSandboxingEnabled() {
|
||||
// Sandboxing is enabled if config has been set (via initialize())
|
||||
return config !== undefined;
|
||||
}
|
||||
/**
|
||||
* Check sandbox dependencies for the current platform
|
||||
* @param ripgrepConfig - Ripgrep command to check. If not provided, uses config from initialization or defaults to 'rg'
|
||||
* @returns { warnings, errors } - errors mean sandbox cannot run, warnings mean degraded functionality
|
||||
*/
|
||||
function checkDependencies(ripgrepConfig) {
|
||||
if (!isSupportedPlatform()) {
|
||||
return { errors: ['Unsupported platform'], warnings: [] };
|
||||
}
|
||||
const errors = [];
|
||||
const warnings = [];
|
||||
// Check ripgrep - use provided config, then initialized config, then default 'rg'
|
||||
const rgToCheck = ripgrepConfig ?? config?.ripgrep ?? { command: 'rg' };
|
||||
if (whichSync(rgToCheck.command) === null) {
|
||||
errors.push(`ripgrep (${rgToCheck.command}) not found`);
|
||||
}
|
||||
const platform = getPlatform();
|
||||
if (platform === 'linux') {
|
||||
const linuxDeps = checkLinuxDependencies(config?.seccomp);
|
||||
errors.push(...linuxDeps.errors);
|
||||
warnings.push(...linuxDeps.warnings);
|
||||
}
|
||||
return { errors, warnings };
|
||||
}
|
||||
function getFsReadConfig() {
|
||||
if (!config) {
|
||||
return { denyOnly: [], allowWithinDeny: [] };
|
||||
}
|
||||
const denyPaths = [];
|
||||
for (const p of config.filesystem.denyRead) {
|
||||
const stripped = removeTrailingGlobSuffix(p);
|
||||
if (getPlatform() === 'linux' && containsGlobChars(stripped)) {
|
||||
// Expand glob to concrete paths on Linux (bubblewrap doesn't support globs)
|
||||
const expanded = expandGlobPattern(p);
|
||||
logForDebugging(`[Sandbox] Expanded glob pattern "${p}" to ${expanded.length} paths on Linux`);
|
||||
denyPaths.push(...expanded);
|
||||
}
|
||||
else {
|
||||
denyPaths.push(stripped);
|
||||
}
|
||||
}
|
||||
// Process allowRead paths (re-allow within denied regions)
|
||||
const allowPaths = [];
|
||||
for (const p of config.filesystem.allowRead ?? []) {
|
||||
const stripped = removeTrailingGlobSuffix(p);
|
||||
if (getPlatform() === 'linux' && containsGlobChars(stripped)) {
|
||||
const expanded = expandGlobPattern(p);
|
||||
logForDebugging(`[Sandbox] Expanded allowRead glob pattern "${p}" to ${expanded.length} paths on Linux`);
|
||||
allowPaths.push(...expanded);
|
||||
}
|
||||
else {
|
||||
allowPaths.push(stripped);
|
||||
}
|
||||
}
|
||||
return {
|
||||
denyOnly: denyPaths,
|
||||
allowWithinDeny: allowPaths,
|
||||
};
|
||||
}
|
||||
function getFsWriteConfig() {
|
||||
if (!config) {
|
||||
return { allowOnly: getDefaultWritePaths(), denyWithinAllow: [] };
|
||||
}
|
||||
// Filter out glob patterns on Linux/WSL for allowWrite (bubblewrap doesn't support globs)
|
||||
const allowPaths = config.filesystem.allowWrite
|
||||
.map(path => removeTrailingGlobSuffix(path))
|
||||
.filter(path => {
|
||||
if (getPlatform() === 'linux' && containsGlobChars(path)) {
|
||||
logForDebugging(`Skipping glob pattern on Linux/WSL: ${path}`);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
// Filter out glob patterns on Linux/WSL for denyWrite (bubblewrap doesn't support globs)
|
||||
const denyPaths = config.filesystem.denyWrite
|
||||
.map(path => removeTrailingGlobSuffix(path))
|
||||
.filter(path => {
|
||||
if (getPlatform() === 'linux' && containsGlobChars(path)) {
|
||||
logForDebugging(`Skipping glob pattern on Linux/WSL: ${path}`);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
// Build allowOnly list: default paths + configured allow paths
|
||||
const allowOnly = [...getDefaultWritePaths(), ...allowPaths];
|
||||
return {
|
||||
allowOnly,
|
||||
denyWithinAllow: denyPaths,
|
||||
};
|
||||
}
|
||||
function getNetworkRestrictionConfig() {
|
||||
if (!config) {
|
||||
return {};
|
||||
}
|
||||
const allowedHosts = config.network.allowedDomains;
|
||||
const deniedHosts = config.network.deniedDomains;
|
||||
return {
|
||||
...(allowedHosts.length > 0 && { allowedHosts }),
|
||||
...(deniedHosts.length > 0 && { deniedHosts }),
|
||||
};
|
||||
}
|
||||
function getAllowUnixSockets() {
|
||||
return config?.network?.allowUnixSockets;
|
||||
}
|
||||
function getAllowAllUnixSockets() {
|
||||
return config?.network?.allowAllUnixSockets;
|
||||
}
|
||||
function getAllowLocalBinding() {
|
||||
return config?.network?.allowLocalBinding;
|
||||
}
|
||||
function getIgnoreViolations() {
|
||||
return config?.ignoreViolations;
|
||||
}
|
||||
function getEnableWeakerNestedSandbox() {
|
||||
return config?.enableWeakerNestedSandbox;
|
||||
}
|
||||
function getEnableWeakerNetworkIsolation() {
|
||||
return config?.enableWeakerNetworkIsolation;
|
||||
}
|
||||
function getRipgrepConfig() {
|
||||
return config?.ripgrep ?? { command: 'rg' };
|
||||
}
|
||||
function getMandatoryDenySearchDepth() {
|
||||
return config?.mandatoryDenySearchDepth ?? 3;
|
||||
}
|
||||
function getAllowGitConfig() {
|
||||
return config?.filesystem?.allowGitConfig ?? false;
|
||||
}
|
||||
function getSeccompConfig() {
|
||||
return config?.seccomp;
|
||||
}
|
||||
function getProxyPort() {
|
||||
return managerContext?.httpProxyPort;
|
||||
}
|
||||
function getSocksProxyPort() {
|
||||
return managerContext?.socksProxyPort;
|
||||
}
|
||||
function getLinuxHttpSocketPath() {
|
||||
return managerContext?.linuxBridge?.httpSocketPath;
|
||||
}
|
||||
function getLinuxSocksSocketPath() {
|
||||
return managerContext?.linuxBridge?.socksSocketPath;
|
||||
}
|
||||
/**
|
||||
* Wait for network initialization to complete if already in progress
|
||||
* Returns true if initialized successfully, false otherwise
|
||||
*/
|
||||
async function waitForNetworkInitialization() {
|
||||
if (!config) {
|
||||
return false;
|
||||
}
|
||||
if (initializationPromise) {
|
||||
try {
|
||||
await initializationPromise;
|
||||
return true;
|
||||
}
|
||||
catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return managerContext !== undefined;
|
||||
}
|
||||
async function wrapWithSandbox(command, binShell, customConfig, abortSignal) {
|
||||
const platform = getPlatform();
|
||||
// Get configs - use custom if provided, otherwise fall back to main config
|
||||
// If neither exists, defaults to empty arrays (most restrictive)
|
||||
// Always include default system write paths (like /dev/null, /tmp/claude)
|
||||
//
|
||||
// Strip trailing /** and filter remaining globs on Linux (bwrap needs
|
||||
// real paths, not globs; macOS subpath matching is also recursive so
|
||||
// stripping is harmless there).
|
||||
const stripWriteGlobs = (paths) => paths
|
||||
.map(p => removeTrailingGlobSuffix(p))
|
||||
.filter(p => {
|
||||
if (getPlatform() === 'linux' && containsGlobChars(p)) {
|
||||
logForDebugging(`[Sandbox] Skipping glob write pattern on Linux: ${p}`);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
const userAllowWrite = stripWriteGlobs(customConfig?.filesystem?.allowWrite ?? config?.filesystem.allowWrite ?? []);
|
||||
const writeConfig = {
|
||||
allowOnly: [...getDefaultWritePaths(), ...userAllowWrite],
|
||||
denyWithinAllow: stripWriteGlobs(customConfig?.filesystem?.denyWrite ?? config?.filesystem.denyWrite ?? []),
|
||||
};
|
||||
const rawDenyRead = customConfig?.filesystem?.denyRead ?? config?.filesystem.denyRead ?? [];
|
||||
const expandedDenyRead = [];
|
||||
for (const p of rawDenyRead) {
|
||||
const stripped = removeTrailingGlobSuffix(p);
|
||||
if (getPlatform() === 'linux' && containsGlobChars(stripped)) {
|
||||
expandedDenyRead.push(...expandGlobPattern(p));
|
||||
}
|
||||
else {
|
||||
expandedDenyRead.push(stripped);
|
||||
}
|
||||
}
|
||||
const rawAllowRead = customConfig?.filesystem?.allowRead ?? config?.filesystem.allowRead ?? [];
|
||||
const expandedAllowRead = [];
|
||||
for (const p of rawAllowRead) {
|
||||
const stripped = removeTrailingGlobSuffix(p);
|
||||
if (getPlatform() === 'linux' && containsGlobChars(stripped)) {
|
||||
expandedAllowRead.push(...expandGlobPattern(p));
|
||||
}
|
||||
else {
|
||||
expandedAllowRead.push(stripped);
|
||||
}
|
||||
}
|
||||
const readConfig = {
|
||||
denyOnly: expandedDenyRead,
|
||||
allowWithinDeny: expandedAllowRead,
|
||||
};
|
||||
// Check if network config is specified - this determines if we need network restrictions
|
||||
// Network restriction is needed when:
|
||||
// 1. customConfig has network.allowedDomains defined (even if empty array = block all)
|
||||
// 2. OR config has network.allowedDomains defined (even if empty array = block all)
|
||||
// An empty allowedDomains array means "no domains allowed" = block all network access
|
||||
const hasNetworkConfig = customConfig?.network?.allowedDomains !== undefined ||
|
||||
config?.network?.allowedDomains !== undefined;
|
||||
// Network RESTRICTION is needed whenever network config is specified
|
||||
// This includes empty allowedDomains which means "block all network"
|
||||
const needsNetworkRestriction = hasNetworkConfig;
|
||||
// Network PROXY is needed whenever network config is specified
|
||||
// Even with empty allowedDomains, we route through proxy so that:
|
||||
// 1. updateConfig() can enable network access for already-running processes
|
||||
// 2. The proxy blocks all requests when allowlist is empty
|
||||
const needsNetworkProxy = hasNetworkConfig;
|
||||
// Wait for network initialization only if proxy is actually needed
|
||||
if (needsNetworkProxy) {
|
||||
await waitForNetworkInitialization();
|
||||
}
|
||||
// Check custom config to allow pseudo-terminal (can be applied dynamically)
|
||||
const allowPty = customConfig?.allowPty ?? config?.allowPty;
|
||||
switch (platform) {
|
||||
case 'macos':
|
||||
// macOS sandbox profile supports glob patterns directly, no ripgrep needed
|
||||
return wrapCommandWithSandboxMacOS({
|
||||
command,
|
||||
needsNetworkRestriction,
|
||||
// Only pass proxy ports if proxy is running (when there are domains to filter)
|
||||
httpProxyPort: needsNetworkProxy ? getProxyPort() : undefined,
|
||||
socksProxyPort: needsNetworkProxy ? getSocksProxyPort() : undefined,
|
||||
readConfig,
|
||||
writeConfig,
|
||||
allowUnixSockets: getAllowUnixSockets(),
|
||||
allowAllUnixSockets: getAllowAllUnixSockets(),
|
||||
allowLocalBinding: getAllowLocalBinding(),
|
||||
ignoreViolations: getIgnoreViolations(),
|
||||
allowPty,
|
||||
allowGitConfig: getAllowGitConfig(),
|
||||
enableWeakerNetworkIsolation: getEnableWeakerNetworkIsolation(),
|
||||
binShell,
|
||||
});
|
||||
case 'linux':
|
||||
return wrapCommandWithSandboxLinux({
|
||||
command,
|
||||
needsNetworkRestriction,
|
||||
// Only pass socket paths if proxy is running (when there are domains to filter)
|
||||
httpSocketPath: needsNetworkProxy
|
||||
? getLinuxHttpSocketPath()
|
||||
: undefined,
|
||||
socksSocketPath: needsNetworkProxy
|
||||
? getLinuxSocksSocketPath()
|
||||
: undefined,
|
||||
httpProxyPort: needsNetworkProxy
|
||||
? managerContext?.httpProxyPort
|
||||
: undefined,
|
||||
socksProxyPort: needsNetworkProxy
|
||||
? managerContext?.socksProxyPort
|
||||
: undefined,
|
||||
readConfig,
|
||||
writeConfig,
|
||||
enableWeakerNestedSandbox: getEnableWeakerNestedSandbox(),
|
||||
allowAllUnixSockets: getAllowAllUnixSockets(),
|
||||
binShell,
|
||||
ripgrepConfig: getRipgrepConfig(),
|
||||
mandatoryDenySearchDepth: getMandatoryDenySearchDepth(),
|
||||
allowGitConfig: getAllowGitConfig(),
|
||||
seccompConfig: getSeccompConfig(),
|
||||
abortSignal,
|
||||
});
|
||||
default:
|
||||
// Unsupported platform - this should not happen since isSandboxingEnabled() checks platform support
|
||||
throw new Error(`Sandbox configuration is not supported on platform: ${platform}`);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Get the current sandbox configuration
|
||||
* @returns The current configuration, or undefined if not initialized
|
||||
*/
|
||||
function getConfig() {
|
||||
return config;
|
||||
}
|
||||
/**
|
||||
* Update the sandbox configuration
|
||||
* @param newConfig - The new configuration to use
|
||||
*/
|
||||
function updateConfig(newConfig) {
    // Deep clone the config to avoid mutations
    // (caller retains ownership of newConfig; later caller-side mutations
    // must not leak into the sandbox's active configuration).
    config = cloneDeep(newConfig);
    logForDebugging('Sandbox configuration updated');
}
|
||||
/**
|
||||
* Lightweight cleanup to call after each sandboxed command completes.
|
||||
*
|
||||
* On Linux, bwrap creates empty files on the host filesystem as mount points
|
||||
* when protecting non-existent deny paths (e.g. ~/.bashrc, ~/.gitconfig).
|
||||
* These persist after bwrap exits. This function removes them.
|
||||
*
|
||||
* Safe to call on any platform — it's a no-op on macOS.
|
||||
* Also called automatically by reset() and on process exit as safety nets.
|
||||
*/
|
||||
function cleanupAfterCommand() {
    // No-op on macOS; on Linux removes the empty files bwrap leaves behind
    // as mount points (see the comment block above this function).
    cleanupBwrapMountPoints();
}
|
||||
/**
 * Terminate a proxy bridge child process and wait for it to exit.
 *
 * Sends SIGTERM, escalating to SIGKILL if the process has not exited within
 * 5 seconds. Resolves once the process exits (or immediately if it was never
 * started or is already dead). ESRCH ("no such process") is ignored; any
 * other signalling error is logged.
 *
 * @param bridgeProcess - The bridge child process to terminate.
 * @param label - Name used in log messages ('HTTP' or 'SOCKS').
 * @returns {Promise<void>} Resolves when the process has exited or cannot be signalled.
 */
function terminateBridgeProcess(bridgeProcess, label) {
    if (!bridgeProcess.pid || bridgeProcess.killed) {
        return Promise.resolve();
    }
    try {
        process.kill(bridgeProcess.pid, 'SIGTERM');
        logForDebugging(`Sent SIGTERM to ${label} bridge process`);
    }
    catch (err) {
        if (err.code !== 'ESRCH') {
            logForDebugging(`Error killing ${label} bridge: ${err}`, {
                level: 'error',
            });
        }
        return Promise.resolve();
    }
    return new Promise(resolve => {
        // Escalate to SIGKILL if the process ignores SIGTERM for 5 seconds.
        const killTimer = setTimeout(() => {
            if (!bridgeProcess.killed) {
                logForDebugging(`${label} bridge did not exit, forcing SIGKILL`, {
                    level: 'warn',
                });
                try {
                    if (bridgeProcess.pid) {
                        process.kill(bridgeProcess.pid, 'SIGKILL');
                    }
                }
                catch {
                    // Process may have already exited
                }
            }
            resolve();
        }, 5000);
        bridgeProcess.once('exit', () => {
            // Fix: clear the escalation timer so a prompt exit does not leave
            // a live 5s timer keeping the event loop (and reset()) waiting.
            clearTimeout(killTimer);
            logForDebugging(`${label} bridge process exited`);
            resolve();
        });
    });
}
/**
 * Best-effort removal of a Unix socket file left behind by a bridge.
 *
 * @param socketPath - Path to the socket file (skipped when undefined).
 * @param label - Name used in log messages ('HTTP' or 'SOCKS').
 */
function removeBridgeSocket(socketPath, label) {
    if (!socketPath) {
        return;
    }
    try {
        fs.rmSync(socketPath, { force: true });
        logForDebugging(`Cleaned up ${label} socket`);
    }
    catch (err) {
        logForDebugging(`${label} socket cleanup error: ${err}`, {
            level: 'error',
        });
    }
}
/**
 * Tear down all sandbox infrastructure for this session: bwrap mount-point
 * leftovers, the log monitor, the Linux bridge processes and their sockets,
 * and the HTTP/SOCKS proxy servers. Clears all module-level references so a
 * subsequent initialize() starts fresh.
 */
async function reset() {
    // Clean up any leftover bwrap mount points
    cleanupAfterCommand();
    // Stop log monitor
    if (logMonitorShutdown) {
        logMonitorShutdown();
        logMonitorShutdown = undefined;
    }
    if (managerContext?.linuxBridge) {
        const { httpSocketPath, socksSocketPath, httpBridgeProcess, socksBridgeProcess, } = managerContext.linuxBridge;
        // Kill both bridges (SIGTERM sent immediately, in HTTP-then-SOCKS
        // order as before) and wait for both to exit in parallel.
        await Promise.all([
            terminateBridgeProcess(httpBridgeProcess, 'HTTP'),
            terminateBridgeProcess(socksBridgeProcess, 'SOCKS'),
        ]);
        // Clean up sockets
        removeBridgeSocket(httpSocketPath, 'HTTP');
        removeBridgeSocket(socksSocketPath, 'SOCKS');
    }
    // Close servers in parallel (only if they exist, i.e., were started by us)
    const closePromises = [];
    if (httpProxyServer) {
        const server = httpProxyServer; // Capture reference to avoid TypeScript error
        const httpClose = new Promise(resolve => {
            server.close(error => {
                if (error && error.message !== 'Server is not running.') {
                    logForDebugging(`Error closing HTTP proxy server: ${error.message}`, {
                        level: 'error',
                    });
                }
                resolve();
            });
        });
        closePromises.push(httpClose);
    }
    if (socksProxyServer) {
        const socksClose = socksProxyServer.close().catch((error) => {
            logForDebugging(`Error closing SOCKS proxy server: ${error.message}`, {
                level: 'error',
            });
        });
        closePromises.push(socksClose);
    }
    // Wait for all servers to close
    await Promise.all(closePromises);
    // Clear references
    httpProxyServer = undefined;
    socksProxyServer = undefined;
    managerContext = undefined;
    initializationPromise = undefined;
}
|
||||
function getSandboxViolationStore() {
    // Expose the module-level violation store instance.
    return sandboxViolationStore;
}
|
||||
/**
 * Append any sandbox violations recorded for `command` to its stderr,
 * wrapped in a <sandbox_violations> block. Returns stderr unchanged when
 * sandboxing is not configured or no violations match the command.
 */
function annotateStderrWithSandboxFailures(command, stderr) {
    // Without an active sandbox configuration there is nothing to report.
    if (!config) {
        return stderr;
    }
    const matched = sandboxViolationStore.getViolationsForCommand(command);
    if (matched.length === 0) {
        return stderr;
    }
    const violationBody = matched.map(v => v.line + EOL).join('');
    return (stderr +
        EOL +
        '<sandbox_violations>' +
        EOL +
        violationBody +
        '</sandbox_violations>');
}
|
||||
/**
|
||||
* Returns glob patterns from Edit/Read permission rules that are not
|
||||
* fully supported on Linux. Returns empty array on macOS or when
|
||||
* sandboxing is disabled.
|
||||
*
|
||||
* Patterns ending with /** are excluded since they work as subpaths.
|
||||
*/
|
||||
/**
 * Returns glob patterns from Edit/Read permission rules that are not fully
 * supported on Linux. Empty on macOS or when sandboxing is disabled.
 * Patterns ending with /** are excluded since they work as plain subpaths.
 */
function getLinuxGlobPatternWarnings() {
    // Only warn on Linux/WSL: bubblewrap cannot enforce glob patterns,
    // whereas macOS converts them to regex.
    if (getPlatform() !== 'linux' || !config) {
        return [];
    }
    // denyRead is excluded because its globs are expanded to concrete paths
    // on Linux before use.
    const candidatePatterns = [
        ...config.filesystem.allowWrite,
        ...config.filesystem.denyWrite,
    ];
    // A trailing /** just means "directory and everything under it", so strip
    // it first; warn only if glob characters remain afterwards.
    return candidatePatterns.filter(pattern => containsGlobChars(removeTrailingGlobSuffix(pattern)));
}
|
||||
// ============================================================================
|
||||
// Export as Namespace with Interface
|
||||
// ============================================================================
|
||||
/**
|
||||
* Global sandbox manager that handles both network and filesystem restrictions
|
||||
* for this session. This runs outside of the sandbox, on the host machine.
|
||||
*/
|
||||
export const SandboxManager = {
    // Lifecycle and platform support
    initialize,
    isSupportedPlatform,
    isSandboxingEnabled,
    checkDependencies,
    // Configuration accessors
    getFsReadConfig,
    getFsWriteConfig,
    getNetworkRestrictionConfig,
    getAllowUnixSockets,
    getAllowLocalBinding,
    getIgnoreViolations,
    getEnableWeakerNestedSandbox,
    // Network proxy state
    getProxyPort,
    getSocksProxyPort,
    getLinuxHttpSocketPath,
    getLinuxSocksSocketPath,
    waitForNetworkInitialization,
    // Command wrapping and cleanup
    wrapWithSandbox,
    cleanupAfterCommand,
    reset,
    // Violations and diagnostics
    getSandboxViolationStore,
    annotateStderrWithSandboxFailures,
    getLinuxGlobPatternWarnings,
    // Raw configuration access
    getConfig,
    updateConfig,
};
|
||||
//# sourceMappingURL=sandbox-manager.js.map
|
||||
435
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/sandbox/sandbox-utils.js
generated
vendored
Normal file
435
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/sandbox/sandbox-utils.js
generated
vendored
Normal file
@@ -0,0 +1,435 @@
|
||||
import { homedir } from 'os';
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs';
|
||||
import { getPlatform } from '../utils/platform.js';
|
||||
import { logForDebugging } from '../utils/debug.js';
|
||||
/**
|
||||
* Dangerous files that should be protected from writes.
|
||||
* These files can be used for code execution or data exfiltration.
|
||||
*/
|
||||
export const DANGEROUS_FILES = [
    // git configuration files
    '.gitconfig',
    '.gitmodules',
    // shell startup files (sourced automatically when a shell launches)
    '.bashrc',
    '.bash_profile',
    '.zshrc',
    '.zprofile',
    '.profile',
    // tool configuration
    '.ripgreprc',
    '.mcp.json',
];
|
||||
/**
|
||||
* Dangerous directories that should be protected from writes.
|
||||
* These directories contain sensitive configuration or executable files.
|
||||
*/
|
||||
// VCS metadata plus IDE settings directories (project task/launch configs can run code).
export const DANGEROUS_DIRECTORIES = ['.git', '.vscode', '.idea'];
|
||||
/**
|
||||
* Get the list of dangerous directories to deny writes to.
|
||||
* Excludes .git since we need it writable for git operations -
|
||||
* instead we block specific paths within .git (hooks and config).
|
||||
*/
|
||||
/**
 * Get the list of dangerous directories to deny writes to.
 *
 * .git is excluded because it must stay writable for git operations;
 * the dangerous paths inside it (hooks, config) are blocked individually
 * elsewhere. Claude command/agent directories are appended since files
 * there influence tool behavior.
 */
export function getDangerousDirectories() {
    const withoutGit = DANGEROUS_DIRECTORIES.filter(dir => dir !== '.git');
    return [...withoutGit, '.claude/commands', '.claude/agents'];
}
|
||||
/**
|
||||
* Normalizes a path for case-insensitive comparison.
|
||||
* This prevents bypassing security checks using mixed-case paths on case-insensitive
|
||||
* filesystems (macOS/Windows) like `.cLauDe/Settings.locaL.json`.
|
||||
*
|
||||
* We always normalize to lowercase regardless of platform for consistent security.
|
||||
* @param path The path to normalize
|
||||
* @returns The lowercase path for safe comparison
|
||||
*/
|
||||
/**
 * Normalize a path for case-insensitive comparison.
 *
 * Always lowercases regardless of platform so security checks behave the
 * same everywhere, preventing bypasses like `.cLauDe/Settings.locaL.json`
 * on case-insensitive filesystems (macOS/Windows).
 *
 * @param {string} pathStr - The path to normalize.
 * @returns {string} The lowercased path, safe for comparison.
 */
export function normalizeCaseForComparison(pathStr) {
    const lowered = pathStr.toLowerCase();
    return lowered;
}
|
||||
/**
|
||||
* Check if a path pattern contains glob characters
|
||||
*/
|
||||
/**
 * Check whether a path pattern contains any glob metacharacters
 * (`*`, `?`, `[`, `]`).
 *
 * @param {string} pathPattern - The pattern to inspect.
 * @returns {boolean} true if any glob character is present.
 */
export function containsGlobChars(pathPattern) {
    const globChars = ['*', '?', '[', ']'];
    for (const ch of globChars) {
        if (pathPattern.includes(ch)) {
            return true;
        }
    }
    return false;
}
|
||||
/**
|
||||
* Remove trailing /** glob suffix from a path pattern
|
||||
* Used to normalize path patterns since /** just means "directory and everything under it"
|
||||
*/
|
||||
/**
 * Remove a trailing `/**` glob suffix from a path pattern.
 *
 * `/**` just means "this directory and everything under it", so stripping it
 * yields the equivalent subpath. An empty result (input was '' or '/**')
 * collapses to the filesystem root '/'.
 *
 * @param {string} pathPattern - The pattern to normalize.
 * @returns {string} The pattern without a trailing /**, never empty.
 */
export function removeTrailingGlobSuffix(pathPattern) {
    const stripped = pathPattern.endsWith('/**')
        ? pathPattern.slice(0, -'/**'.length)
        : pathPattern;
    return stripped === '' ? '/' : stripped;
}
|
||||
/**
|
||||
* Check if a symlink resolution crosses expected path boundaries.
|
||||
*
|
||||
* When resolving symlinks for sandbox path normalization, we need to ensure
|
||||
* the resolved path doesn't unexpectedly broaden the scope. This function
|
||||
* returns true if the resolved path is an ancestor of the original path
|
||||
* or resolves to a system root, which would indicate the symlink points
|
||||
* outside expected boundaries.
|
||||
*
|
||||
* @param originalPath - The original path before symlink resolution
|
||||
* @param resolvedPath - The path after fs.realpathSync() resolution
|
||||
* @returns true if the resolved path is outside expected boundaries
|
||||
*/
|
||||
export function isSymlinkOutsideBoundary(originalPath, resolvedPath) {
    const normalizedOriginal = path.normalize(originalPath);
    const normalizedResolved = path.normalize(resolvedPath);
    // Same path after normalization - OK
    if (normalizedResolved === normalizedOriginal) {
        return false;
    }
    // Handle macOS /tmp -> /private/tmp canonical resolution
    // This is a legitimate system symlink that should be allowed
    // /tmp/claude -> /private/tmp/claude is OK
    // /var/folders/... -> /private/var/folders/... is OK
    if (normalizedOriginal.startsWith('/tmp/') &&
        normalizedResolved === '/private' + normalizedOriginal) {
        return false;
    }
    if (normalizedOriginal.startsWith('/var/') &&
        normalizedResolved === '/private' + normalizedOriginal) {
        return false;
    }
    // Also handle the reverse: /private/tmp/... resolving to itself
    // NOTE(review): these two branches can never fire — equality between
    // resolved and original already returned false at the top of the function.
    if (normalizedOriginal.startsWith('/private/tmp/') &&
        normalizedResolved === normalizedOriginal) {
        return false;
    }
    if (normalizedOriginal.startsWith('/private/var/') &&
        normalizedResolved === normalizedOriginal) {
        return false;
    }
    // If resolved path is "/" it's outside expected boundaries
    if (normalizedResolved === '/') {
        return true;
    }
    // If resolved path is very short (single component like /tmp, /usr, /var),
    // it's likely outside expected boundaries
    const resolvedParts = normalizedResolved.split('/').filter(Boolean);
    if (resolvedParts.length <= 1) {
        return true;
    }
    // If original path starts with resolved path, the resolved path is an ancestor
    // e.g., /tmp/claude -> /tmp means the symlink points to a broader scope
    if (normalizedOriginal.startsWith(normalizedResolved + '/')) {
        return true;
    }
    // Also check the canonical form of the original path for macOS
    // e.g., /tmp/claude should also be checked as /private/tmp/claude
    let canonicalOriginal = normalizedOriginal;
    if (normalizedOriginal.startsWith('/tmp/')) {
        canonicalOriginal = '/private' + normalizedOriginal;
    }
    else if (normalizedOriginal.startsWith('/var/')) {
        canonicalOriginal = '/private' + normalizedOriginal;
    }
    if (canonicalOriginal !== normalizedOriginal &&
        canonicalOriginal.startsWith(normalizedResolved + '/')) {
        return true;
    }
    // STRICT CHECK: Only allow resolutions that stay within the expected path tree
    // The resolved path must either:
    // 1. Start with the original path (deeper/same) - already covered by returning false below
    // 2. Start with the canonical original (deeper/same under canonical form)
    // 3. BE the canonical form of the original (e.g., /tmp/x -> /private/tmp/x)
    // Any other resolution (e.g., /tmp/claude -> /Users/dworken) is outside expected bounds
    const resolvedStartsWithOriginal = normalizedResolved.startsWith(normalizedOriginal + '/');
    const resolvedStartsWithCanonical = canonicalOriginal !== normalizedOriginal &&
        normalizedResolved.startsWith(canonicalOriginal + '/');
    // NOTE(review): resolvedIsCanonical duplicates the /tmp//var early
    // returns above; resolvedIsSame is always false here (equality returned
    // early). Both kept for defensive clarity of the strict check.
    const resolvedIsCanonical = canonicalOriginal !== normalizedOriginal &&
        normalizedResolved === canonicalOriginal;
    const resolvedIsSame = normalizedResolved === normalizedOriginal;
    // If resolved path is not within expected tree, it's outside boundary
    if (!resolvedIsSame &&
        !resolvedIsCanonical &&
        !resolvedStartsWithOriginal &&
        !resolvedStartsWithCanonical) {
        return true;
    }
    // Allow resolution to same directory level or deeper within expected tree
    return false;
}
|
||||
/**
|
||||
* Normalize a path for use in sandbox configurations
|
||||
* Handles:
|
||||
* - Tilde (~) expansion for home directory
|
||||
* - Relative paths (./foo, ../foo, etc.) converted to absolute
|
||||
* - Absolute paths remain unchanged
|
||||
* - Symlinks are resolved to their real paths for non-glob patterns
|
||||
* - Glob patterns preserve wildcards after path normalization
|
||||
*
|
||||
* Returns the absolute path with symlinks resolved (or normalized glob pattern)
|
||||
*/
|
||||
export function normalizePathForSandbox(pathPattern) {
    const cwd = process.cwd();
    let normalizedPath = pathPattern;
    // Expand ~ to home directory
    // NOTE(review): only bare `~` and `~/...` are expanded; `~user/...` is
    // treated as a relative path below — confirm that is intended.
    if (pathPattern === '~') {
        normalizedPath = homedir();
    }
    else if (pathPattern.startsWith('~/')) {
        normalizedPath = homedir() + pathPattern.slice(1);
    }
    else if (pathPattern.startsWith('./') || pathPattern.startsWith('../')) {
        // Convert relative to absolute based on current working directory
        normalizedPath = path.resolve(cwd, pathPattern);
    }
    else if (!path.isAbsolute(pathPattern)) {
        // Handle other relative paths (e.g., ".", "..", "foo/bar")
        normalizedPath = path.resolve(cwd, pathPattern);
    }
    // For glob patterns, resolve symlinks for the directory portion only
    if (containsGlobChars(normalizedPath)) {
        // Extract the static directory prefix before glob characters
        const staticPrefix = normalizedPath.split(/[*?[\]]/)[0];
        if (staticPrefix && staticPrefix !== '/') {
            // Get the directory containing the glob pattern
            // If staticPrefix ends with /, remove it to get the directory
            const baseDir = staticPrefix.endsWith('/')
                ? staticPrefix.slice(0, -1)
                : path.dirname(staticPrefix);
            // Try to resolve symlinks for the base directory
            try {
                const resolvedBaseDir = fs.realpathSync(baseDir);
                // Validate that resolution stays within expected boundaries
                if (!isSymlinkOutsideBoundary(baseDir, resolvedBaseDir)) {
                    // Reconstruct the pattern with the resolved directory
                    const patternSuffix = normalizedPath.slice(baseDir.length);
                    return resolvedBaseDir + patternSuffix;
                }
                // If resolution would broaden scope, keep original pattern
            }
            catch {
                // If directory doesn't exist or can't be resolved, keep the original pattern
            }
        }
        // Glob patterns never take the whole-path realpath branch below.
        return normalizedPath;
    }
    // Resolve symlinks to real paths to avoid bwrap issues
    // Validate that the resolution stays within expected boundaries
    try {
        const resolvedPath = fs.realpathSync(normalizedPath);
        // Only use resolved path if it doesn't cross boundary (e.g., symlink to parent dir)
        if (isSymlinkOutsideBoundary(normalizedPath, resolvedPath)) {
            // Symlink points outside expected boundaries - keep original path
        }
        else {
            normalizedPath = resolvedPath;
        }
    }
    catch {
        // If path doesn't exist or can't be resolved, keep the normalized path
    }
    return normalizedPath;
}
|
||||
/**
|
||||
* Get recommended system paths that should be writable for commands to work properly
|
||||
*
|
||||
* WARNING: These default paths are intentionally broad for compatibility but may
|
||||
* allow access to files from other processes. In highly security-sensitive
|
||||
* environments, you should configure more restrictive write paths.
|
||||
*/
|
||||
/**
 * Get recommended system paths that should be writable for commands to work
 * properly.
 *
 * WARNING: These default paths are intentionally broad for compatibility but
 * may allow access to files from other processes. In highly
 * security-sensitive environments, configure more restrictive write paths.
 *
 * @returns {string[]} Device nodes commands routinely write to, the Claude
 *   temp directory (both /tmp and its macOS /private canonical form), and
 *   npm/Claude log and debug directories under the user's home.
 */
export function getDefaultWritePaths() {
    const home = homedir();
    return [
        '/dev/stdout',
        '/dev/stderr',
        '/dev/null',
        '/dev/tty',
        '/dev/dtracehelper',
        '/dev/autofs_nowait',
        '/tmp/claude',
        '/private/tmp/claude',
        path.join(home, '.npm/_logs'),
        path.join(home, '.claude/debug'),
    ];
}
|
||||
/**
|
||||
* Generate proxy environment variables for sandboxed processes
|
||||
*/
|
||||
export function generateProxyEnvVars(httpProxyPort, socksProxyPort) {
    // Respect CLAUDE_TMPDIR if set, otherwise default to /tmp/claude
    const tmpdir = process.env.CLAUDE_TMPDIR || '/tmp/claude';
    const envVars = [`SANDBOX_RUNTIME=1`, `TMPDIR=${tmpdir}`];
    // If no proxy ports provided, return minimal env vars
    if (!httpProxyPort && !socksProxyPort) {
        return envVars;
    }
    // Always set NO_PROXY to exclude localhost and private networks from proxying
    const noProxyAddresses = [
        'localhost',
        '127.0.0.1',
        '::1',
        '*.local',
        '.local',
        '169.254.0.0/16', // Link-local
        '10.0.0.0/8', // Private network
        '172.16.0.0/12', // Private network
        '192.168.0.0/16', // Private network
    ].join(',');
    envVars.push(`NO_PROXY=${noProxyAddresses}`);
    envVars.push(`no_proxy=${noProxyAddresses}`);
    if (httpProxyPort) {
        envVars.push(`HTTP_PROXY=http://localhost:${httpProxyPort}`);
        envVars.push(`HTTPS_PROXY=http://localhost:${httpProxyPort}`);
        // Lowercase versions for compatibility with some tools
        envVars.push(`http_proxy=http://localhost:${httpProxyPort}`);
        envVars.push(`https_proxy=http://localhost:${httpProxyPort}`);
    }
    if (socksProxyPort) {
        // Use socks5h:// for proper DNS resolution through proxy
        envVars.push(`ALL_PROXY=socks5h://localhost:${socksProxyPort}`);
        envVars.push(`all_proxy=socks5h://localhost:${socksProxyPort}`);
        // Configure Git to use SSH through the proxy so DNS resolution happens outside the sandbox
        const platform = getPlatform();
        if (platform === 'macos') {
            // macOS: use BSD nc SOCKS5 proxy support (-X 5 -x)
            envVars.push(`GIT_SSH_COMMAND=ssh -o ProxyCommand='nc -X 5 -x localhost:${socksProxyPort} %h %p'`);
        }
        else if (platform === 'linux' && httpProxyPort) {
            // Linux: use socat HTTP CONNECT via the HTTP proxy bridge.
            // socat is already a required Linux sandbox dependency, and PROXY: is
            // portable across all socat versions (unlike SOCKS5-CONNECT which needs >= 1.8.0).
            envVars.push(`GIT_SSH_COMMAND=ssh -o ProxyCommand='socat - PROXY:localhost:%h:%p,proxyport=${httpProxyPort}'`);
        }
        // FTP proxy support (use socks5h for DNS resolution through proxy)
        envVars.push(`FTP_PROXY=socks5h://localhost:${socksProxyPort}`);
        envVars.push(`ftp_proxy=socks5h://localhost:${socksProxyPort}`);
        // rsync proxy support
        envVars.push(`RSYNC_PROXY=localhost:${socksProxyPort}`);
        // Database tools NOTE: Most database clients don't have built-in proxy support
        // You typically need to use SSH tunneling or a SOCKS wrapper like tsocks/proxychains
        // Docker CLI uses HTTP for the API
        // This makes Docker use the HTTP proxy for registry operations
        // NOTE(review): when only a SOCKS port exists, these point an http://
        // URL at the SOCKS port — confirm Docker can actually use that.
        envVars.push(`DOCKER_HTTP_PROXY=http://localhost:${httpProxyPort || socksProxyPort}`);
        envVars.push(`DOCKER_HTTPS_PROXY=http://localhost:${httpProxyPort || socksProxyPort}`);
        // Kubernetes kubectl - uses standard HTTPS_PROXY
        // kubectl respects HTTPS_PROXY which we already set above
        // AWS CLI - uses standard HTTPS_PROXY (v2 supports it well)
        // AWS CLI v2 respects HTTPS_PROXY which we already set above
        // Google Cloud SDK - has specific proxy settings
        // Use HTTPS proxy to match other HTTP-based tools
        if (httpProxyPort) {
            envVars.push(`CLOUDSDK_PROXY_TYPE=https`);
            envVars.push(`CLOUDSDK_PROXY_ADDRESS=localhost`);
            envVars.push(`CLOUDSDK_PROXY_PORT=${httpProxyPort}`);
        }
        // Azure CLI - uses HTTPS_PROXY
        // Azure CLI respects HTTPS_PROXY which we already set above
        // Terraform - uses standard HTTP/HTTPS proxy vars
        // Terraform respects HTTP_PROXY/HTTPS_PROXY which we already set above
        // gRPC-based tools - use standard proxy vars
        envVars.push(`GRPC_PROXY=socks5h://localhost:${socksProxyPort}`);
        envVars.push(`grpc_proxy=socks5h://localhost:${socksProxyPort}`);
    }
    // WARNING: Do not set HTTP_PROXY/HTTPS_PROXY to SOCKS URLs when only SOCKS proxy is available
    // Most HTTP clients do not support SOCKS URLs in these variables and will fail, and we want
    // to avoid overriding the client otherwise respecting the ALL_PROXY env var which points to SOCKS.
    return envVars;
}
|
||||
/**
|
||||
* Encode a command for sandbox monitoring
|
||||
* Truncates to 100 chars and base64 encodes to avoid parsing issues
|
||||
*/
|
||||
/**
 * Encode a command for sandbox monitoring.
 *
 * The command is capped at 100 characters before base64-encoding so that
 * monitor log lines stay bounded and free of parsing-hostile characters.
 *
 * @param {string} command - The raw command line.
 * @returns {string} Base64 encoding of the (possibly truncated) command.
 */
export function encodeSandboxedCommand(command) {
    const capped = command.substring(0, 100);
    return Buffer.from(capped).toString('base64');
}
|
||||
/**
|
||||
* Decode a base64-encoded command from sandbox monitoring
|
||||
*/
|
||||
/**
 * Decode a base64-encoded command produced by encodeSandboxedCommand().
 *
 * @param {string} encodedCommand - Base64-encoded command text.
 * @returns {string} The decoded UTF-8 command (possibly truncated at encode time).
 */
export function decodeSandboxedCommand(encodedCommand) {
    const raw = Buffer.from(encodedCommand, 'base64');
    return raw.toString('utf8');
}
|
||||
/**
|
||||
* Convert a glob pattern to a regular expression
|
||||
*
|
||||
* This implements gitignore-style pattern matching to match the behavior of the
|
||||
* `ignore` library used by the permission system.
|
||||
*
|
||||
* Supported patterns:
|
||||
* - * matches any characters except / (e.g., *.ts matches foo.ts but not foo/bar.ts)
|
||||
* - ** matches any characters including / (e.g., src/**\/*.ts matches all .ts files in src/)
|
||||
* - ? matches any single character except / (e.g., file?.txt matches file1.txt)
|
||||
* - [abc] matches any character in the set (e.g., file[0-9].txt matches file3.txt)
|
||||
*
|
||||
* Exported for testing and shared between macOS sandbox profiles and Linux glob expansion.
|
||||
*/
|
||||
/**
 * Convert a glob pattern to a regular expression source string.
 *
 * Implements gitignore-style matching to mirror the `ignore` library used by
 * the permission system:
 * - `*`  matches any characters except `/`
 * - `**` matches any characters including `/` (and `**`/ matches zero or more dirs)
 * - `?`  matches a single character except `/`
 * - `[abc]` matches any character in the set
 *
 * Exported for testing and shared between macOS sandbox profiles and Linux
 * glob expansion.
 *
 * @param {string} globPattern - The glob pattern to convert.
 * @returns {string} An anchored (^…$) regex source string.
 */
export function globToRegex(globPattern) {
    // Ordered rewrite rules; ** must be protected via placeholders before *
    // is rewritten, and restored only after * and ? are handled.
    const rules = [
        [/[.^$+{}()|\\]/g, '\\$&'], // escape regex metacharacters (not glob chars)
        [/\[([^\]]*?)$/g, '\\[$1'], // escape an unclosed [ with no matching ]
        [/\*\*\//g, '__GLOBSTAR_SLASH__'], // protect **/
        [/\*\*/g, '__GLOBSTAR__'], // protect **
        [/\*/g, '[^/]*'], // * matches anything except /
        [/\?/g, '[^/]'], // ? matches one char except /
        [/__GLOBSTAR_SLASH__/g, '(.*/)?'], // **/ matches zero or more dirs
        [/__GLOBSTAR__/g, '.*'], // ** matches anything including /
    ];
    const body = rules.reduce((acc, [pattern, replacement]) => acc.replace(pattern, replacement), globPattern);
    return `^${body}$`;
}
|
||||
/**
|
||||
* Expand a glob pattern into concrete file paths.
|
||||
*
|
||||
* Used on Linux where bubblewrap doesn't support glob patterns natively.
|
||||
* Resolves the static directory prefix, lists files recursively, and filters
|
||||
* using globToRegex().
|
||||
*
|
||||
* @param globPath - A path pattern containing glob characters (e.g., ~/test/*.env)
|
||||
* @returns Array of absolute paths matching the glob pattern
|
||||
*/
|
||||
export function expandGlobPattern(globPath) {
    const normalizedPattern = normalizePathForSandbox(globPath);
    // Extract the static directory prefix before any glob characters
    const staticPrefix = normalizedPattern.split(/[*?[\]]/)[0];
    if (!staticPrefix || staticPrefix === '/') {
        // A pattern rooted at / would require scanning the whole filesystem.
        logForDebugging(`[Sandbox] Glob pattern too broad, skipping: ${globPath}`);
        return [];
    }
    // Get the base directory from the static prefix
    const baseDir = staticPrefix.endsWith('/')
        ? staticPrefix.slice(0, -1)
        : path.dirname(staticPrefix);
    if (!fs.existsSync(baseDir)) {
        logForDebugging(`[Sandbox] Base directory for glob does not exist: ${baseDir}`);
        return [];
    }
    // Build regex from the normalized glob pattern
    const regex = new RegExp(globToRegex(normalizedPattern));
    // List all entries recursively under the base directory
    const results = [];
    try {
        const entries = fs.readdirSync(baseDir, {
            recursive: true,
            withFileTypes: true,
        });
        for (const entry of entries) {
            // Build the full path for this entry
            // entry.parentPath is the directory containing this entry (available in Node 20+/Bun)
            // For compatibility, fall back to entry.path if parentPath is not available
            const parentDir = entry.parentPath ??
                entry.path ??
                baseDir;
            const fullPath = path.join(parentDir, entry.name);
            if (regex.test(fullPath)) {
                results.push(fullPath);
            }
        }
    }
    catch (err) {
        // Best-effort: directory listing failures yield whatever matched so far.
        logForDebugging(`[Sandbox] Error expanding glob pattern ${globPath}: ${err}`);
    }
    return results;
}
|
||||
//# sourceMappingURL=sandbox-utils.js.map
|
||||
54
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/sandbox/sandbox-violation-store.js
generated
vendored
Normal file
54
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/sandbox/sandbox-violation-store.js
generated
vendored
Normal file
@@ -0,0 +1,54 @@
|
||||
import { encodeSandboxedCommand } from './sandbox-utils.js';
|
||||
/**
|
||||
* In-memory tail for sandbox violations
|
||||
*/
|
||||
/**
 * In-memory tail of recent sandbox violations with subscription support.
 *
 * At most `maxSize` violations are retained (oldest dropped first), while
 * `totalCount` tracks the lifetime total and survives clear().
 */
export class SandboxViolationStore {
    constructor() {
        this.violations = [];
        this.totalCount = 0;
        this.maxSize = 100;
        this.listeners = new Set();
    }
    /** Record a violation, trim to the newest `maxSize`, and notify listeners. */
    addViolation(violation) {
        this.violations.push(violation);
        this.totalCount += 1;
        const overflow = this.violations.length - this.maxSize;
        if (overflow > 0) {
            this.violations = this.violations.slice(overflow);
        }
        this.notifyListeners();
    }
    /** Return a copy of the retained violations, optionally only the newest `limit`. */
    getViolations(limit) {
        return limit === undefined
            ? [...this.violations]
            : this.violations.slice(-limit);
    }
    /** Number of violations currently retained (at most maxSize). */
    getCount() {
        return this.violations.length;
    }
    /** Lifetime violation count; not reset by clear(). */
    getTotalCount() {
        return this.totalCount;
    }
    /** Retained violations recorded for a specific command, matched via its base64 encoding. */
    getViolationsForCommand(command) {
        const encoded = encodeSandboxedCommand(command);
        return this.violations.filter(violation => violation.encodedCommand === encoded);
    }
    /** Drop retained violations (totalCount is preserved) and notify listeners. */
    clear() {
        this.violations = [];
        // Don't reset totalCount when clearing
        this.notifyListeners();
    }
    /**
     * Register a listener. It is invoked immediately with the current
     * violations and again on every change. Returns an unsubscribe function.
     */
    subscribe(listener) {
        this.listeners.add(listener);
        listener(this.getViolations());
        return () => {
            this.listeners.delete(listener);
        };
    }
    /** Push a fresh snapshot of all retained violations to every subscriber. */
    notifyListeners() {
        const snapshot = this.getViolations();
        for (const listener of this.listeners) {
            listener(snapshot);
        }
    }
}
|
||||
//# sourceMappingURL=sandbox-violation-store.js.map
|
||||
95
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/sandbox/socks-proxy.js
generated
vendored
Normal file
95
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/sandbox/socks-proxy.js
generated
vendored
Normal file
@@ -0,0 +1,95 @@
|
||||
import { createServer } from '@pondwader/socks5-server';
|
||||
import { logForDebugging } from '../utils/debug.js';
|
||||
/**
 * Create a SOCKS5 proxy server whose connections are gated by
 * `options.filter(port, hostname)` (an async predicate; false blocks the
 * connection). Returns a small wrapper exposing the raw server plus
 * getPort/listen/close/unref helpers.
 */
export function createSocksProxyServer(options) {
    const socksServer = createServer();
    socksServer.setRulesetValidator(async (conn) => {
        try {
            const hostname = conn.destAddress;
            const port = conn.destPort;
            logForDebugging(`Connection request to ${hostname}:${port}`);
            const allowed = await options.filter(port, hostname);
            if (!allowed) {
                logForDebugging(`Connection blocked to ${hostname}:${port}`, {
                    level: 'error',
                });
                return false;
            }
            logForDebugging(`Connection allowed to ${hostname}:${port}`);
            return true;
        }
        catch (error) {
            // Fail closed: any validator error blocks the connection.
            logForDebugging(`Error validating connection: ${error}`, {
                level: 'error',
            });
            return false;
        }
    });
    return {
        server: socksServer,
        // Returns the bound port, or undefined when not (yet) listening.
        getPort() {
            // Access the internal server to get the port
            // We need to use type assertion here as the server property is private
            try {
                const serverInternal = socksServer?.server;
                if (serverInternal && typeof serverInternal?.address === 'function') {
                    const address = serverInternal.address();
                    if (address && typeof address === 'object' && 'port' in address) {
                        return address.port;
                    }
                }
            }
            catch (error) {
                // Server might not be listening yet or property access failed
                logForDebugging(`Error getting port: ${error}`, { level: 'error' });
            }
            return undefined;
        },
        // Resolves with the actual bound port (useful when port 0 requests an
        // ephemeral port); rejects if the port cannot be determined.
        listen(port, hostname) {
            return new Promise((resolve, reject) => {
                // Arrow function so `this` stays bound to this wrapper object.
                const listeningCallback = () => {
                    const actualPort = this.getPort();
                    if (actualPort) {
                        logForDebugging(`SOCKS proxy listening on ${hostname}:${actualPort}`);
                        resolve(actualPort);
                    }
                    else {
                        reject(new Error('Failed to get SOCKS proxy server port'));
                    }
                };
                socksServer.listen(port, hostname, listeningCallback);
            });
        },
        async close() {
            return new Promise((resolve, reject) => {
                socksServer.close(error => {
                    if (error) {
                        // Only reject for actual errors, not for "already closed" states
                        // Check for common "already closed" error patterns
                        const errorMessage = error.message?.toLowerCase() || '';
                        const isAlreadyClosed = errorMessage.includes('not running') ||
                            errorMessage.includes('already closed') ||
                            errorMessage.includes('not listening');
                        if (!isAlreadyClosed) {
                            reject(error);
                            return;
                        }
                    }
                    resolve();
                });
            });
        },
        // Allow the process to exit even while the proxy is still listening.
        unref() {
            // Access the internal server to call unref
            try {
                const serverInternal = socksServer?.server;
                if (serverInternal && typeof serverInternal?.unref === 'function') {
                    serverInternal.unref();
                }
            }
            catch (error) {
                logForDebugging(`Error calling unref: ${error}`, { level: 'error' });
            }
        },
    };
}
|
||||
//# sourceMappingURL=socks-proxy.js.map
|
||||
25
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/utils/debug.js
generated
vendored
Normal file
25
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/utils/debug.js
generated
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
/**
|
||||
* Simple debug logging for standalone sandbox
|
||||
*/
|
||||
/**
 * Simple debug logging for the standalone sandbox.
 * Emits to stderr only when the SRT_DEBUG environment variable is set.
 *
 * @param {string} message - Text to log.
 * @param {{level?: string}} [options] - Optional severity; 'warn' routes to
 *   console.warn, 'error' and the default 'info' both route to console.error.
 */
export function logForDebugging(message, options) {
    // Only log if SRT_DEBUG is set. SRT_DEBUG (rather than DEBUG) avoids
    // conflicts with Node.js debug libraries and VS Code.
    if (!process.env.SRT_DEBUG) {
        return;
    }
    const line = `[SandboxDebug] ${message}`;
    const level = options?.level || 'info';
    // Always use stderr-backed channels to avoid corrupting stdout JSON streams.
    if (level === 'warn') {
        console.warn(line);
    }
    else {
        // 'error' and the default 'info' are both written via console.error.
        console.error(line);
    }
}
|
||||
//# sourceMappingURL=debug.js.map
|
||||
49
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/utils/platform.js
generated
vendored
Normal file
49
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/utils/platform.js
generated
vendored
Normal file
@@ -0,0 +1,49 @@
|
||||
/**
|
||||
* Platform detection utilities
|
||||
*/
|
||||
import * as fs from 'fs';
|
||||
/**
|
||||
* Get the WSL version (1 or 2+) if running in WSL.
|
||||
* Returns undefined if not running in WSL.
|
||||
*/
|
||||
/**
 * Get the WSL version ('1', '2', ...) if running in WSL.
 * Returns undefined when not on Linux, when /proc/version is unreadable,
 * or when no WSL markers are present.
 */
export function getWslVersion() {
    if (process.platform !== 'linux') {
        return undefined;
    }
    let procVersion;
    try {
        procVersion = fs.readFileSync('/proc/version', { encoding: 'utf8' });
    }
    catch {
        // /proc/version missing or unreadable - cannot be identified as WSL.
        return undefined;
    }
    // An explicit marker such as "WSL2" wins (e.g. "WSL2", "WSL3", ...).
    const explicitMarker = /WSL(\d+)/i.exec(procVersion);
    if (explicitMarker && explicitMarker[1]) {
        return explicitMarker[1];
    }
    // No explicit version but a "Microsoft" kernel string means legacy WSL1,
    // e.g. "4.4.0-19041-Microsoft".
    return procVersion.toLowerCase().includes('microsoft') ? '1' : undefined;
}
|
||||
/**
|
||||
* Detect the current platform.
|
||||
* Note: All Linux including WSL returns 'linux'. Use getWslVersion() to detect WSL1 (unsupported).
|
||||
*/
|
||||
/**
 * Detect the current platform as one of 'macos' | 'linux' | 'windows' | 'unknown'.
 * Note: all Linux including WSL returns 'linux'. WSL2+ shares Linux
 * sandboxing; WSL1 also reports 'linux' but fails the isSupportedPlatform
 * check elsewhere — use getWslVersion() to tell them apart.
 */
export function getPlatform() {
    const byNodePlatform = {
        darwin: 'macos',
        linux: 'linux',
        win32: 'windows',
    };
    return byNodePlatform[process.platform] ?? 'unknown';
}
|
||||
//# sourceMappingURL=platform.js.map
|
||||
45
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/utils/ripgrep.js
generated
vendored
Normal file
45
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/utils/ripgrep.js
generated
vendored
Normal file
@@ -0,0 +1,45 @@
|
||||
import { spawn } from 'child_process';
|
||||
import { text } from 'node:stream/consumers';
|
||||
import { whichSync } from './which.js';
|
||||
/**
|
||||
* Check if ripgrep (rg) is available synchronously
|
||||
* Returns true if rg is installed, false otherwise
|
||||
*/
|
||||
/**
 * Check if ripgrep (rg) is available synchronously.
 * @returns {boolean} true if rg is resolvable on PATH, false otherwise.
 */
export function hasRipgrepSync() {
    const resolved = whichSync('rg');
    return resolved !== null;
}
|
||||
/**
|
||||
* Execute ripgrep with the given arguments
|
||||
* @param args Command-line arguments to pass to rg
|
||||
* @param target Target directory or file to search
|
||||
* @param abortSignal AbortSignal to cancel the operation
|
||||
* @param config Ripgrep configuration (command and optional args)
|
||||
* @returns Array of matching lines (one per line of output)
|
||||
* @throws Error if ripgrep exits with non-zero status (except exit code 1 which means no matches)
|
||||
*/
|
||||
/**
 * Execute ripgrep with the given arguments.
 *
 * @param args Command-line arguments to pass to rg.
 * @param target Target directory or file to search.
 * @param abortSignal AbortSignal to cancel the operation.
 * @param config Ripgrep configuration (command and optional args/argv0).
 * @returns Array of matching lines (one per line of output).
 * @throws Error if ripgrep exits with a non-zero status other than 1
 *   (exit code 1 simply means "no matches").
 */
export async function ripGrep(args, target, abortSignal, config = { command: 'rg' }) {
    const { command, args: commandArgs = [], argv0 } = config;
    const fullArgs = [...commandArgs, ...args, target];
    const child = spawn(command, fullArgs, {
        argv0,
        signal: abortSignal,
        // Hard cap: the child is killed if it runs longer than 10 seconds.
        timeout: 10000,
        windowsHide: true,
    });
    // Drain both output streams while waiting for the process to finish;
    // 'error' (e.g. spawn failure) rejects the whole operation.
    const exited = new Promise((resolve, reject) => {
        child.on('close', resolve);
        child.on('error', reject);
    });
    const [stdout, stderr, code] = await Promise.all([
        text(child.stdout),
        text(child.stderr),
        exited,
    ]);
    switch (code) {
        case 0:
            // Success: one result per non-empty output line.
            return stdout.trim().split('\n').filter(Boolean);
        case 1:
            // Exit code 1 means "no matches found" - this is normal.
            return [];
        default:
            throw new Error(`ripgrep failed with exit code ${code}: ${stderr}`);
    }
}
|
||||
//# sourceMappingURL=ripgrep.js.map
|
||||
25
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/utils/which.js
generated
vendored
Normal file
25
extracted-source/node_modules/@anthropic-ai/sandbox-runtime/dist/utils/which.js
generated
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
import { spawnSync } from 'node:child_process';
|
||||
/**
|
||||
* Find the path to an executable, similar to the `which` command.
|
||||
* Uses Bun.which when running in Bun, falls back to spawnSync for Node.js.
|
||||
*
|
||||
* @param bin - The name of the executable to find
|
||||
* @returns The full path to the executable, or null if not found
|
||||
*/
|
||||
/**
 * Find the path to an executable, similar to the `which` command.
 * Uses Bun.which when running in Bun, falls back to spawnSync for Node.js.
 *
 * @param bin - The name of the executable to find.
 * @returns The full path to the executable, or null if not found.
 */
export function whichSync(bin) {
    // Bun ships a native lookup; prefer it when present.
    if (typeof globalThis.Bun !== 'undefined') {
        return globalThis.Bun.which(bin);
    }
    // Node.js path: shell out to `which` and read its stdout.
    const lookup = spawnSync('which', [bin], {
        encoding: 'utf8',
        stdio: ['ignore', 'pipe', 'ignore'],
        timeout: 1000,
    });
    const found = lookup.status === 0 && lookup.stdout;
    return found ? lookup.stdout.trim() : null;
}
|
||||
//# sourceMappingURL=which.js.map
|
||||
223
extracted-source/node_modules/@anthropic-ai/sdk/_vendor/partial-json-parser/parser.mjs
generated
vendored
Normal file
223
extracted-source/node_modules/@anthropic-ai/sdk/_vendor/partial-json-parser/parser.mjs
generated
vendored
Normal file
@@ -0,0 +1,223 @@
|
||||
// Partial JSON parser: tokenizes possibly-truncated JSON text, strips
// trailing incomplete tokens, re-closes any braces/brackets left open, and
// hands the repaired text to JSON.parse.
const tokenize = (input) => {
    let current = 0;
    let tokens = [];
    while (current < input.length) {
        let char = input[current];
        // A bare backslash outside a string literal is skipped entirely
        // (escapes inside strings are handled in the '"' branch below).
        if (char === '\\') {
            current++;
            continue;
        }
        if (char === '{') {
            tokens.push({
                type: 'brace',
                value: '{',
            });
            current++;
            continue;
        }
        if (char === '}') {
            tokens.push({
                type: 'brace',
                value: '}',
            });
            current++;
            continue;
        }
        if (char === '[') {
            tokens.push({
                type: 'paren',
                value: '[',
            });
            current++;
            continue;
        }
        if (char === ']') {
            tokens.push({
                type: 'paren',
                value: ']',
            });
            current++;
            continue;
        }
        if (char === ':') {
            tokens.push({
                type: 'separator',
                value: ':',
            });
            current++;
            continue;
        }
        if (char === ',') {
            tokens.push({
                type: 'delimiter',
                value: ',',
            });
            current++;
            continue;
        }
        if (char === '"') {
            // String literal: collect characters up to the closing quote.
            // If the input ends first, the unterminated token is dropped
            // (danglingQuote) so strip/unstrip see only complete strings.
            let value = '';
            let danglingQuote = false;
            char = input[++current];
            while (char !== '"') {
                if (current === input.length) {
                    danglingQuote = true;
                    break;
                }
                if (char === '\\') {
                    // Keep the escape sequence verbatim (both characters).
                    current++;
                    if (current === input.length) {
                        danglingQuote = true;
                        break;
                    }
                    value += char + input[current];
                    char = input[++current];
                }
                else {
                    value += char;
                    char = input[++current];
                }
            }
            char = input[++current];
            if (!danglingQuote) {
                tokens.push({
                    type: 'string',
                    value,
                });
            }
            continue;
        }
        let WHITESPACE = /\s/;
        if (char && WHITESPACE.test(char)) {
            current++;
            continue;
        }
        // Number literal: optional leading '-', then any run of digits and '.'.
        let NUMBERS = /[0-9]/;
        if ((char && NUMBERS.test(char)) || char === '-' || char === '.') {
            let value = '';
            if (char === '-') {
                value += char;
                char = input[++current];
            }
            while ((char && NUMBERS.test(char)) || char === '.') {
                value += char;
                char = input[++current];
            }
            tokens.push({
                type: 'number',
                value,
            });
            continue;
        }
        // Bare words: only the complete JSON keywords become 'name' tokens.
        let LETTERS = /[a-z]/i;
        if (char && LETTERS.test(char)) {
            let value = '';
            while (char && LETTERS.test(char)) {
                if (current === input.length) {
                    break;
                }
                value += char;
                char = input[++current];
            }
            if (value == 'true' || value == 'false' || value === 'null') {
                tokens.push({
                    type: 'name',
                    value,
                });
            }
            else {
                // unknown token, e.g. `nul` which isn't quite `null`
                current++;
                continue;
            }
            continue;
        }
        // Anything unrecognized is silently skipped.
        current++;
    }
    return tokens;
}, strip = (tokens) => {
    // Recursively drop trailing tokens that cannot end valid JSON:
    // dangling ':' or ',', incomplete numbers, and orphaned keys/values.
    if (tokens.length === 0) {
        return tokens;
    }
    let lastToken = tokens[tokens.length - 1];
    switch (lastToken.type) {
        case 'separator':
            tokens = tokens.slice(0, tokens.length - 1);
            return strip(tokens);
            break;
        case 'number':
            // A number ending in '.' or '-' is incomplete; drop it.
            let lastCharacterOfLastToken = lastToken.value[lastToken.value.length - 1];
            if (lastCharacterOfLastToken === '.' || lastCharacterOfLastToken === '-') {
                tokens = tokens.slice(0, tokens.length - 1);
                return strip(tokens);
            }
            // NOTE(review): no `break` here - a complete trailing number falls
            // through into the 'string' case below, so it is also dropped when
            // immediately preceded by ',' or '{'. Presumably intentional -
            // confirm against the upstream partial-json-parser before changing.
        case 'string':
            // A trailing string right after ',' or '{' is an orphaned value
            // (or a key with no value yet); drop it.
            let tokenBeforeTheLastToken = tokens[tokens.length - 2];
            if (tokenBeforeTheLastToken?.type === 'delimiter') {
                tokens = tokens.slice(0, tokens.length - 1);
                return strip(tokens);
            }
            else if (tokenBeforeTheLastToken?.type === 'brace' && tokenBeforeTheLastToken.value === '{') {
                tokens = tokens.slice(0, tokens.length - 1);
                return strip(tokens);
            }
            break;
        case 'delimiter':
            tokens = tokens.slice(0, tokens.length - 1);
            return strip(tokens);
            break;
    }
    return tokens;
}, unstrip = (tokens) => {
    // Append closers for any braces/brackets still open, innermost-first
    // after the reverse below. (.map is used purely for iteration here.)
    let tail = [];
    tokens.map((token) => {
        if (token.type === 'brace') {
            if (token.value === '{') {
                tail.push('}');
            }
            else {
                tail.splice(tail.lastIndexOf('}'), 1);
            }
        }
        if (token.type === 'paren') {
            if (token.value === '[') {
                tail.push(']');
            }
            else {
                tail.splice(tail.lastIndexOf(']'), 1);
            }
        }
    });
    if (tail.length > 0) {
        tail.reverse().map((item) => {
            if (item === '}') {
                tokens.push({
                    type: 'brace',
                    value: '}',
                });
            }
            else if (item === ']') {
                tokens.push({
                    type: 'paren',
                    value: ']',
                });
            }
        });
    }
    return tokens;
}, generate = (tokens) => {
    // Serialize the token list back into JSON text; string tokens get their
    // surrounding quotes restored, everything else emits its raw value.
    let output = '';
    tokens.map((token) => {
        switch (token.type) {
            case 'string':
                output += '"' + token.value + '"';
                break;
            default:
                output += token.value;
                break;
        }
    });
    return output;
}, partialParse = (input) => JSON.parse(generate(unstrip(strip(tokenize(input)))));
export { partialParse };
//# sourceMappingURL=parser.mjs.map
|
||||
559
extracted-source/node_modules/@anthropic-ai/sdk/client.mjs
generated
vendored
Normal file
559
extracted-source/node_modules/@anthropic-ai/sdk/client.mjs
generated
vendored
Normal file
@@ -0,0 +1,559 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
var _BaseAnthropic_instances, _a, _BaseAnthropic_encoder, _BaseAnthropic_baseURLOverridden;
|
||||
import { __classPrivateFieldGet, __classPrivateFieldSet } from "./internal/tslib.mjs";
|
||||
import { uuid4 } from "./internal/utils/uuid.mjs";
|
||||
import { validatePositiveInteger, isAbsoluteURL, safeJSON } from "./internal/utils/values.mjs";
|
||||
import { sleep } from "./internal/utils/sleep.mjs";
|
||||
import { castToError, isAbortError } from "./internal/errors.mjs";
|
||||
import { getPlatformHeaders } from "./internal/detect-platform.mjs";
|
||||
import * as Shims from "./internal/shims.mjs";
|
||||
import * as Opts from "./internal/request-options.mjs";
|
||||
import { VERSION } from "./version.mjs";
|
||||
import * as Errors from "./core/error.mjs";
|
||||
import * as Pagination from "./core/pagination.mjs";
|
||||
import * as Uploads from "./core/uploads.mjs";
|
||||
import * as API from "./resources/index.mjs";
|
||||
import { APIPromise } from "./core/api-promise.mjs";
|
||||
import { Completions, } from "./resources/completions.mjs";
|
||||
import { Models } from "./resources/models.mjs";
|
||||
import { Beta, } from "./resources/beta/beta.mjs";
|
||||
import { Messages, } from "./resources/messages/messages.mjs";
|
||||
import { isRunningInBrowser } from "./internal/detect-platform.mjs";
|
||||
import { buildHeaders } from "./internal/headers.mjs";
|
||||
import { readEnv } from "./internal/utils/env.mjs";
|
||||
import { formatRequestDetails, loggerFor, parseLogLevel, } from "./internal/utils/log.mjs";
|
||||
import { isEmptyObj } from "./internal/utils/values.mjs";
|
||||
export const HUMAN_PROMPT = '\\n\\nHuman:';
|
||||
export const AI_PROMPT = '\\n\\nAssistant:';
|
||||
/**
|
||||
* Base class for Anthropic API clients.
|
||||
*/
|
||||
export class BaseAnthropic {
|
||||
/**
|
||||
* API Client for interfacing with the Anthropic API.
|
||||
*
|
||||
* @param {string | null | undefined} [opts.apiKey=process.env['ANTHROPIC_API_KEY'] ?? null]
|
||||
* @param {string | null | undefined} [opts.authToken=process.env['ANTHROPIC_AUTH_TOKEN'] ?? null]
|
||||
* @param {string} [opts.baseURL=process.env['ANTHROPIC_BASE_URL'] ?? https://api.anthropic.com] - Override the default base URL for the API.
|
||||
* @param {number} [opts.timeout=10 minutes] - The maximum amount of time (in milliseconds) the client will wait for a response before timing out.
|
||||
* @param {MergedRequestInit} [opts.fetchOptions] - Additional `RequestInit` options to be passed to `fetch` calls.
|
||||
* @param {Fetch} [opts.fetch] - Specify a custom `fetch` function implementation.
|
||||
* @param {number} [opts.maxRetries=2] - The maximum number of times the client will retry a request.
|
||||
* @param {HeadersLike} opts.defaultHeaders - Default headers to include with every request to the API.
|
||||
* @param {Record<string, string | undefined>} opts.defaultQuery - Default query parameters to include with every request to the API.
|
||||
* @param {boolean} [opts.dangerouslyAllowBrowser=false] - By default, client-side use of this library is not allowed, as it risks exposing your secret API credentials to attackers.
|
||||
*/
|
||||
constructor({ baseURL = readEnv('ANTHROPIC_BASE_URL'), apiKey = readEnv('ANTHROPIC_API_KEY') ?? null, authToken = readEnv('ANTHROPIC_AUTH_TOKEN') ?? null, ...opts } = {}) {
|
||||
_BaseAnthropic_instances.add(this);
|
||||
_BaseAnthropic_encoder.set(this, void 0);
|
||||
const options = {
|
||||
apiKey,
|
||||
authToken,
|
||||
...opts,
|
||||
baseURL: baseURL || `https://api.anthropic.com`,
|
||||
};
|
||||
if (!options.dangerouslyAllowBrowser && isRunningInBrowser()) {
|
||||
throw new Errors.AnthropicError("It looks like you're running in a browser-like environment.\n\nThis is disabled by default, as it risks exposing your secret API credentials to attackers.\nIf you understand the risks and have appropriate mitigations in place,\nyou can set the `dangerouslyAllowBrowser` option to `true`, e.g.,\n\nnew Anthropic({ apiKey, dangerouslyAllowBrowser: true });\n");
|
||||
}
|
||||
this.baseURL = options.baseURL;
|
||||
this.timeout = options.timeout ?? _a.DEFAULT_TIMEOUT /* 10 minutes */;
|
||||
this.logger = options.logger ?? console;
|
||||
const defaultLogLevel = 'warn';
|
||||
// Set default logLevel early so that we can log a warning in parseLogLevel.
|
||||
this.logLevel = defaultLogLevel;
|
||||
this.logLevel =
|
||||
parseLogLevel(options.logLevel, 'ClientOptions.logLevel', this) ??
|
||||
parseLogLevel(readEnv('ANTHROPIC_LOG'), "process.env['ANTHROPIC_LOG']", this) ??
|
||||
defaultLogLevel;
|
||||
this.fetchOptions = options.fetchOptions;
|
||||
this.maxRetries = options.maxRetries ?? 2;
|
||||
this.fetch = options.fetch ?? Shims.getDefaultFetch();
|
||||
__classPrivateFieldSet(this, _BaseAnthropic_encoder, Opts.FallbackEncoder, "f");
|
||||
this._options = options;
|
||||
this.apiKey = typeof apiKey === 'string' ? apiKey : null;
|
||||
this.authToken = authToken;
|
||||
}
|
||||
/**
|
||||
* Create a new client instance re-using the same options given to the current client with optional overriding.
|
||||
*/
|
||||
withOptions(options) {
|
||||
const client = new this.constructor({
|
||||
...this._options,
|
||||
baseURL: this.baseURL,
|
||||
maxRetries: this.maxRetries,
|
||||
timeout: this.timeout,
|
||||
logger: this.logger,
|
||||
logLevel: this.logLevel,
|
||||
fetch: this.fetch,
|
||||
fetchOptions: this.fetchOptions,
|
||||
apiKey: this.apiKey,
|
||||
authToken: this.authToken,
|
||||
...options,
|
||||
});
|
||||
return client;
|
||||
}
|
||||
defaultQuery() {
|
||||
return this._options.defaultQuery;
|
||||
}
|
||||
validateHeaders({ values, nulls }) {
|
||||
if (values.get('x-api-key') || values.get('authorization')) {
|
||||
return;
|
||||
}
|
||||
if (this.apiKey && values.get('x-api-key')) {
|
||||
return;
|
||||
}
|
||||
if (nulls.has('x-api-key')) {
|
||||
return;
|
||||
}
|
||||
if (this.authToken && values.get('authorization')) {
|
||||
return;
|
||||
}
|
||||
if (nulls.has('authorization')) {
|
||||
return;
|
||||
}
|
||||
throw new Error('Could not resolve authentication method. Expected either apiKey or authToken to be set. Or for one of the "X-Api-Key" or "Authorization" headers to be explicitly omitted');
|
||||
}
|
||||
async authHeaders(opts) {
|
||||
return buildHeaders([await this.apiKeyAuth(opts), await this.bearerAuth(opts)]);
|
||||
}
|
||||
async apiKeyAuth(opts) {
|
||||
if (this.apiKey == null) {
|
||||
return undefined;
|
||||
}
|
||||
return buildHeaders([{ 'X-Api-Key': this.apiKey }]);
|
||||
}
|
||||
async bearerAuth(opts) {
|
||||
if (this.authToken == null) {
|
||||
return undefined;
|
||||
}
|
||||
return buildHeaders([{ Authorization: `Bearer ${this.authToken}` }]);
|
||||
}
|
||||
/**
|
||||
* Basic re-implementation of `qs.stringify` for primitive types.
|
||||
*/
|
||||
stringifyQuery(query) {
|
||||
return Object.entries(query)
|
||||
.filter(([_, value]) => typeof value !== 'undefined')
|
||||
.map(([key, value]) => {
|
||||
if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') {
|
||||
return `${encodeURIComponent(key)}=${encodeURIComponent(value)}`;
|
||||
}
|
||||
if (value === null) {
|
||||
return `${encodeURIComponent(key)}=`;
|
||||
}
|
||||
throw new Errors.AnthropicError(`Cannot stringify type ${typeof value}; Expected string, number, boolean, or null. If you need to pass nested query parameters, you can manually encode them, e.g. { query: { 'foo[key1]': value1, 'foo[key2]': value2 } }, and please open a GitHub issue requesting better support for your use case.`);
|
||||
})
|
||||
.join('&');
|
||||
}
|
||||
getUserAgent() {
|
||||
return `${this.constructor.name}/JS ${VERSION}`;
|
||||
}
|
||||
defaultIdempotencyKey() {
|
||||
return `stainless-node-retry-${uuid4()}`;
|
||||
}
|
||||
makeStatusError(status, error, message, headers) {
|
||||
return Errors.APIError.generate(status, error, message, headers);
|
||||
}
|
||||
buildURL(path, query, defaultBaseURL) {
|
||||
const baseURL = (!__classPrivateFieldGet(this, _BaseAnthropic_instances, "m", _BaseAnthropic_baseURLOverridden).call(this) && defaultBaseURL) || this.baseURL;
|
||||
const url = isAbsoluteURL(path) ?
|
||||
new URL(path)
|
||||
: new URL(baseURL + (baseURL.endsWith('/') && path.startsWith('/') ? path.slice(1) : path));
|
||||
const defaultQuery = this.defaultQuery();
|
||||
if (!isEmptyObj(defaultQuery)) {
|
||||
query = { ...defaultQuery, ...query };
|
||||
}
|
||||
if (typeof query === 'object' && query && !Array.isArray(query)) {
|
||||
url.search = this.stringifyQuery(query);
|
||||
}
|
||||
return url.toString();
|
||||
}
|
||||
_calculateNonstreamingTimeout(maxTokens) {
|
||||
const defaultTimeout = 10 * 60;
|
||||
const expectedTimeout = (60 * 60 * maxTokens) / 128000;
|
||||
if (expectedTimeout > defaultTimeout) {
|
||||
throw new Errors.AnthropicError('Streaming is required for operations that may take longer than 10 minutes. ' +
|
||||
'See https://github.com/anthropics/anthropic-sdk-typescript#streaming-responses for more details');
|
||||
}
|
||||
return defaultTimeout * 1000;
|
||||
}
|
||||
/**
|
||||
* Used as a callback for mutating the given `FinalRequestOptions` object.
|
||||
*/
|
||||
async prepareOptions(options) { }
|
||||
/**
|
||||
* Used as a callback for mutating the given `RequestInit` object.
|
||||
*
|
||||
* This is useful for cases where you want to add certain headers based off of
|
||||
* the request properties, e.g. `method` or `url`.
|
||||
*/
|
||||
async prepareRequest(request, { url, options }) { }
|
||||
get(path, opts) {
|
||||
return this.methodRequest('get', path, opts);
|
||||
}
|
||||
post(path, opts) {
|
||||
return this.methodRequest('post', path, opts);
|
||||
}
|
||||
patch(path, opts) {
|
||||
return this.methodRequest('patch', path, opts);
|
||||
}
|
||||
put(path, opts) {
|
||||
return this.methodRequest('put', path, opts);
|
||||
}
|
||||
delete(path, opts) {
|
||||
return this.methodRequest('delete', path, opts);
|
||||
}
|
||||
methodRequest(method, path, opts) {
|
||||
return this.request(Promise.resolve(opts).then((opts) => {
|
||||
return { method, path, ...opts };
|
||||
}));
|
||||
}
|
||||
request(options, remainingRetries = null) {
|
||||
return new APIPromise(this, this.makeRequest(options, remainingRetries, undefined));
|
||||
}
|
||||
async makeRequest(optionsInput, retriesRemaining, retryOfRequestLogID) {
|
||||
const options = await optionsInput;
|
||||
const maxRetries = options.maxRetries ?? this.maxRetries;
|
||||
if (retriesRemaining == null) {
|
||||
retriesRemaining = maxRetries;
|
||||
}
|
||||
await this.prepareOptions(options);
|
||||
const { req, url, timeout } = await this.buildRequest(options, {
|
||||
retryCount: maxRetries - retriesRemaining,
|
||||
});
|
||||
await this.prepareRequest(req, { url, options });
|
||||
/** Not an API request ID, just for correlating local log entries. */
|
||||
const requestLogID = 'log_' + ((Math.random() * (1 << 24)) | 0).toString(16).padStart(6, '0');
|
||||
const retryLogStr = retryOfRequestLogID === undefined ? '' : `, retryOf: ${retryOfRequestLogID}`;
|
||||
const startTime = Date.now();
|
||||
loggerFor(this).debug(`[${requestLogID}] sending request`, formatRequestDetails({
|
||||
retryOfRequestLogID,
|
||||
method: options.method,
|
||||
url,
|
||||
options,
|
||||
headers: req.headers,
|
||||
}));
|
||||
if (options.signal?.aborted) {
|
||||
throw new Errors.APIUserAbortError();
|
||||
}
|
||||
const controller = new AbortController();
|
||||
const response = await this.fetchWithTimeout(url, req, timeout, controller).catch(castToError);
|
||||
const headersTime = Date.now();
|
||||
if (response instanceof globalThis.Error) {
|
||||
const retryMessage = `retrying, ${retriesRemaining} attempts remaining`;
|
||||
if (options.signal?.aborted) {
|
||||
throw new Errors.APIUserAbortError();
|
||||
}
|
||||
// detect native connection timeout errors
|
||||
// deno throws "TypeError: error sending request for url (https://example/): client error (Connect): tcp connect error: Operation timed out (os error 60): Operation timed out (os error 60)"
|
||||
// undici throws "TypeError: fetch failed" with cause "ConnectTimeoutError: Connect Timeout Error (attempted address: example:443, timeout: 1ms)"
|
||||
// others do not provide enough information to distinguish timeouts from other connection errors
|
||||
const isTimeout = isAbortError(response) ||
|
||||
/timed? ?out/i.test(String(response) + ('cause' in response ? String(response.cause) : ''));
|
||||
if (retriesRemaining) {
|
||||
loggerFor(this).info(`[${requestLogID}] connection ${isTimeout ? 'timed out' : 'failed'} - ${retryMessage}`);
|
||||
loggerFor(this).debug(`[${requestLogID}] connection ${isTimeout ? 'timed out' : 'failed'} (${retryMessage})`, formatRequestDetails({
|
||||
retryOfRequestLogID,
|
||||
url,
|
||||
durationMs: headersTime - startTime,
|
||||
message: response.message,
|
||||
}));
|
||||
return this.retryRequest(options, retriesRemaining, retryOfRequestLogID ?? requestLogID);
|
||||
}
|
||||
loggerFor(this).info(`[${requestLogID}] connection ${isTimeout ? 'timed out' : 'failed'} - error; no more retries left`);
|
||||
loggerFor(this).debug(`[${requestLogID}] connection ${isTimeout ? 'timed out' : 'failed'} (error; no more retries left)`, formatRequestDetails({
|
||||
retryOfRequestLogID,
|
||||
url,
|
||||
durationMs: headersTime - startTime,
|
||||
message: response.message,
|
||||
}));
|
||||
if (isTimeout) {
|
||||
throw new Errors.APIConnectionTimeoutError();
|
||||
}
|
||||
throw new Errors.APIConnectionError({ cause: response });
|
||||
}
|
||||
const specialHeaders = [...response.headers.entries()]
|
||||
.filter(([name]) => name === 'request-id')
|
||||
.map(([name, value]) => ', ' + name + ': ' + JSON.stringify(value))
|
||||
.join('');
|
||||
const responseInfo = `[${requestLogID}${retryLogStr}${specialHeaders}] ${req.method} ${url} ${response.ok ? 'succeeded' : 'failed'} with status ${response.status} in ${headersTime - startTime}ms`;
|
||||
if (!response.ok) {
|
||||
const shouldRetry = await this.shouldRetry(response);
|
||||
if (retriesRemaining && shouldRetry) {
|
||||
const retryMessage = `retrying, ${retriesRemaining} attempts remaining`;
|
||||
// We don't need the body of this response.
|
||||
await Shims.CancelReadableStream(response.body);
|
||||
loggerFor(this).info(`${responseInfo} - ${retryMessage}`);
|
||||
loggerFor(this).debug(`[${requestLogID}] response error (${retryMessage})`, formatRequestDetails({
|
||||
retryOfRequestLogID,
|
||||
url: response.url,
|
||||
status: response.status,
|
||||
headers: response.headers,
|
||||
durationMs: headersTime - startTime,
|
||||
}));
|
||||
return this.retryRequest(options, retriesRemaining, retryOfRequestLogID ?? requestLogID, response.headers);
|
||||
}
|
||||
const retryMessage = shouldRetry ? `error; no more retries left` : `error; not retryable`;
|
||||
loggerFor(this).info(`${responseInfo} - ${retryMessage}`);
|
||||
const errText = await response.text().catch((err) => castToError(err).message);
|
||||
const errJSON = safeJSON(errText);
|
||||
const errMessage = errJSON ? undefined : errText;
|
||||
loggerFor(this).debug(`[${requestLogID}] response error (${retryMessage})`, formatRequestDetails({
|
||||
retryOfRequestLogID,
|
||||
url: response.url,
|
||||
status: response.status,
|
||||
headers: response.headers,
|
||||
message: errMessage,
|
||||
durationMs: Date.now() - startTime,
|
||||
}));
|
||||
const err = this.makeStatusError(response.status, errJSON, errMessage, response.headers);
|
||||
throw err;
|
||||
}
|
||||
loggerFor(this).info(responseInfo);
|
||||
loggerFor(this).debug(`[${requestLogID}] response start`, formatRequestDetails({
|
||||
retryOfRequestLogID,
|
||||
url: response.url,
|
||||
status: response.status,
|
||||
headers: response.headers,
|
||||
durationMs: headersTime - startTime,
|
||||
}));
|
||||
return { response, options, controller, requestLogID, retryOfRequestLogID, startTime };
|
||||
}
|
||||
getAPIList(path, Page, opts) {
|
||||
return this.requestAPIList(Page, opts && 'then' in opts ?
|
||||
opts.then((opts) => ({ method: 'get', path, ...opts }))
|
||||
: { method: 'get', path, ...opts });
|
||||
}
|
||||
requestAPIList(Page, options) {
|
||||
const request = this.makeRequest(options, null, undefined);
|
||||
return new Pagination.PagePromise(this, request, Page);
|
||||
}
|
||||
async fetchWithTimeout(url, init, ms, controller) {
|
||||
const { signal, method, ...options } = init || {};
|
||||
// Avoid creating a closure over `this`, `init`, or `options` to prevent memory leaks.
|
||||
// An arrow function like `() => controller.abort()` captures the surrounding scope,
|
||||
// which includes the request body and other large objects. When the user passes a
|
||||
// long-lived AbortSignal, the listener prevents those objects from being GC'd for
|
||||
// the lifetime of the signal. Using `.bind()` only retains a reference to the
|
||||
// controller itself.
|
||||
const abort = this._makeAbort(controller);
|
||||
if (signal)
|
||||
signal.addEventListener('abort', abort, { once: true });
|
||||
const timeout = setTimeout(abort, ms);
|
||||
const isReadableBody = (globalThis.ReadableStream && options.body instanceof globalThis.ReadableStream) ||
|
||||
(typeof options.body === 'object' && options.body !== null && Symbol.asyncIterator in options.body);
|
||||
const fetchOptions = {
|
||||
signal: controller.signal,
|
||||
...(isReadableBody ? { duplex: 'half' } : {}),
|
||||
method: 'GET',
|
||||
...options,
|
||||
};
|
||||
if (method) {
|
||||
// Custom methods like 'patch' need to be uppercased
|
||||
// See https://github.com/nodejs/undici/issues/2294
|
||||
fetchOptions.method = method.toUpperCase();
|
||||
}
|
||||
try {
|
||||
// use undefined this binding; fetch errors if bound to something else in browser/cloudflare
|
||||
return await this.fetch.call(undefined, url, fetchOptions);
|
||||
}
|
||||
finally {
|
||||
clearTimeout(timeout);
|
||||
}
|
||||
}
|
||||
async shouldRetry(response) {
|
||||
// Note this is not a standard header.
|
||||
const shouldRetryHeader = response.headers.get('x-should-retry');
|
||||
// If the server explicitly says whether or not to retry, obey.
|
||||
if (shouldRetryHeader === 'true')
|
||||
return true;
|
||||
if (shouldRetryHeader === 'false')
|
||||
return false;
|
||||
// Retry on request timeouts.
|
||||
if (response.status === 408)
|
||||
return true;
|
||||
// Retry on lock timeouts.
|
||||
if (response.status === 409)
|
||||
return true;
|
||||
// Retry on rate limits.
|
||||
if (response.status === 429)
|
||||
return true;
|
||||
// Retry internal errors.
|
||||
if (response.status >= 500)
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
async retryRequest(options, retriesRemaining, requestLogID, responseHeaders) {
|
||||
let timeoutMillis;
|
||||
// Note the `retry-after-ms` header may not be standard, but is a good idea and we'd like proactive support for it.
|
||||
const retryAfterMillisHeader = responseHeaders?.get('retry-after-ms');
|
||||
if (retryAfterMillisHeader) {
|
||||
const timeoutMs = parseFloat(retryAfterMillisHeader);
|
||||
if (!Number.isNaN(timeoutMs)) {
|
||||
timeoutMillis = timeoutMs;
|
||||
}
|
||||
}
|
||||
// About the Retry-After header: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After
|
||||
const retryAfterHeader = responseHeaders?.get('retry-after');
|
||||
if (retryAfterHeader && !timeoutMillis) {
|
||||
const timeoutSeconds = parseFloat(retryAfterHeader);
|
||||
if (!Number.isNaN(timeoutSeconds)) {
|
||||
timeoutMillis = timeoutSeconds * 1000;
|
||||
}
|
||||
else {
|
||||
timeoutMillis = Date.parse(retryAfterHeader) - Date.now();
|
||||
}
|
||||
}
|
||||
// If the API asks us to wait a certain amount of time (and it's a reasonable amount),
|
||||
// just do what it says, but otherwise calculate a default
|
||||
if (!(timeoutMillis && 0 <= timeoutMillis && timeoutMillis < 60 * 1000)) {
|
||||
const maxRetries = options.maxRetries ?? this.maxRetries;
|
||||
timeoutMillis = this.calculateDefaultRetryTimeoutMillis(retriesRemaining, maxRetries);
|
||||
}
|
||||
await sleep(timeoutMillis);
|
||||
return this.makeRequest(options, retriesRemaining - 1, requestLogID);
|
||||
}
|
||||
calculateDefaultRetryTimeoutMillis(retriesRemaining, maxRetries) {
|
||||
const initialRetryDelay = 0.5;
|
||||
const maxRetryDelay = 8.0;
|
||||
const numRetries = maxRetries - retriesRemaining;
|
||||
// Apply exponential backoff, but not more than the max.
|
||||
const sleepSeconds = Math.min(initialRetryDelay * Math.pow(2, numRetries), maxRetryDelay);
|
||||
// Apply some jitter, take up to at most 25 percent of the retry time.
|
||||
const jitter = 1 - Math.random() * 0.25;
|
||||
return sleepSeconds * jitter * 1000;
|
||||
}
|
||||
calculateNonstreamingTimeout(maxTokens, maxNonstreamingTokens) {
|
||||
const maxTime = 60 * 60 * 1000; // 60 minutes
|
||||
const defaultTime = 60 * 10 * 1000; // 10 minutes
|
||||
const expectedTime = (maxTime * maxTokens) / 128000;
|
||||
if (expectedTime > defaultTime || (maxNonstreamingTokens != null && maxTokens > maxNonstreamingTokens)) {
|
||||
throw new Errors.AnthropicError('Streaming is required for operations that may take longer than 10 minutes. See https://github.com/anthropics/anthropic-sdk-typescript#long-requests for more details');
|
||||
}
|
||||
return defaultTime;
|
||||
}
|
||||
async buildRequest(inputOptions, { retryCount = 0 } = {}) {
|
||||
const options = { ...inputOptions };
|
||||
const { method, path, query, defaultBaseURL } = options;
|
||||
const url = this.buildURL(path, query, defaultBaseURL);
|
||||
if ('timeout' in options)
|
||||
validatePositiveInteger('timeout', options.timeout);
|
||||
options.timeout = options.timeout ?? this.timeout;
|
||||
const { bodyHeaders, body } = this.buildBody({ options });
|
||||
const reqHeaders = await this.buildHeaders({ options: inputOptions, method, bodyHeaders, retryCount });
|
||||
const req = {
|
||||
method,
|
||||
headers: reqHeaders,
|
||||
...(options.signal && { signal: options.signal }),
|
||||
...(globalThis.ReadableStream &&
|
||||
body instanceof globalThis.ReadableStream && { duplex: 'half' }),
|
||||
...(body && { body }),
|
||||
...(this.fetchOptions ?? {}),
|
||||
...(options.fetchOptions ?? {}),
|
||||
};
|
||||
return { req, url, timeout: options.timeout };
|
||||
}
|
||||
async buildHeaders({ options, method, bodyHeaders, retryCount, }) {
|
||||
let idempotencyHeaders = {};
|
||||
if (this.idempotencyHeader && method !== 'get') {
|
||||
if (!options.idempotencyKey)
|
||||
options.idempotencyKey = this.defaultIdempotencyKey();
|
||||
idempotencyHeaders[this.idempotencyHeader] = options.idempotencyKey;
|
||||
}
|
||||
const headers = buildHeaders([
|
||||
idempotencyHeaders,
|
||||
{
|
||||
Accept: 'application/json',
|
||||
'User-Agent': this.getUserAgent(),
|
||||
'X-Stainless-Retry-Count': String(retryCount),
|
||||
...(options.timeout ? { 'X-Stainless-Timeout': String(Math.trunc(options.timeout / 1000)) } : {}),
|
||||
...getPlatformHeaders(),
|
||||
...(this._options.dangerouslyAllowBrowser ?
|
||||
{ 'anthropic-dangerous-direct-browser-access': 'true' }
|
||||
: undefined),
|
||||
'anthropic-version': '2023-06-01',
|
||||
},
|
||||
await this.authHeaders(options),
|
||||
this._options.defaultHeaders,
|
||||
bodyHeaders,
|
||||
options.headers,
|
||||
]);
|
||||
this.validateHeaders(headers);
|
||||
return headers.values;
|
||||
}
|
||||
_makeAbort(controller) {
|
||||
// note: we can't just inline this method inside `fetchWithTimeout()` because then the closure
|
||||
// would capture all request options, and cause a memory leak.
|
||||
return () => controller.abort();
|
||||
}
|
||||
buildBody({ options: { body, headers: rawHeaders } }) {
|
||||
if (!body) {
|
||||
return { bodyHeaders: undefined, body: undefined };
|
||||
}
|
||||
const headers = buildHeaders([rawHeaders]);
|
||||
if (
|
||||
// Pass raw type verbatim
|
||||
ArrayBuffer.isView(body) ||
|
||||
body instanceof ArrayBuffer ||
|
||||
body instanceof DataView ||
|
||||
(typeof body === 'string' &&
|
||||
// Preserve legacy string encoding behavior for now
|
||||
headers.values.has('content-type')) ||
|
||||
// `Blob` is superset of `File`
|
||||
(globalThis.Blob && body instanceof globalThis.Blob) ||
|
||||
// `FormData` -> `multipart/form-data`
|
||||
body instanceof FormData ||
|
||||
// `URLSearchParams` -> `application/x-www-form-urlencoded`
|
||||
body instanceof URLSearchParams ||
|
||||
// Send chunked stream (each chunk has own `length`)
|
||||
(globalThis.ReadableStream && body instanceof globalThis.ReadableStream)) {
|
||||
return { bodyHeaders: undefined, body: body };
|
||||
}
|
||||
else if (typeof body === 'object' &&
|
||||
(Symbol.asyncIterator in body ||
|
||||
(Symbol.iterator in body && 'next' in body && typeof body.next === 'function'))) {
|
||||
return { bodyHeaders: undefined, body: Shims.ReadableStreamFrom(body) };
|
||||
}
|
||||
else {
|
||||
return __classPrivateFieldGet(this, _BaseAnthropic_encoder, "f").call(this, { body, headers });
|
||||
}
|
||||
}
|
||||
}
|
||||
_a = BaseAnthropic, _BaseAnthropic_encoder = new WeakMap(), _BaseAnthropic_instances = new WeakSet(), _BaseAnthropic_baseURLOverridden = function _BaseAnthropic_baseURLOverridden() {
|
||||
return this.baseURL !== 'https://api.anthropic.com';
|
||||
};
|
||||
BaseAnthropic.Anthropic = _a;
|
||||
BaseAnthropic.HUMAN_PROMPT = HUMAN_PROMPT;
|
||||
BaseAnthropic.AI_PROMPT = AI_PROMPT;
|
||||
BaseAnthropic.DEFAULT_TIMEOUT = 600000; // 10 minutes
|
||||
BaseAnthropic.AnthropicError = Errors.AnthropicError;
|
||||
BaseAnthropic.APIError = Errors.APIError;
|
||||
BaseAnthropic.APIConnectionError = Errors.APIConnectionError;
|
||||
BaseAnthropic.APIConnectionTimeoutError = Errors.APIConnectionTimeoutError;
|
||||
BaseAnthropic.APIUserAbortError = Errors.APIUserAbortError;
|
||||
BaseAnthropic.NotFoundError = Errors.NotFoundError;
|
||||
BaseAnthropic.ConflictError = Errors.ConflictError;
|
||||
BaseAnthropic.RateLimitError = Errors.RateLimitError;
|
||||
BaseAnthropic.BadRequestError = Errors.BadRequestError;
|
||||
BaseAnthropic.AuthenticationError = Errors.AuthenticationError;
|
||||
BaseAnthropic.InternalServerError = Errors.InternalServerError;
|
||||
BaseAnthropic.PermissionDeniedError = Errors.PermissionDeniedError;
|
||||
BaseAnthropic.UnprocessableEntityError = Errors.UnprocessableEntityError;
|
||||
BaseAnthropic.toFile = Uploads.toFile;
|
||||
/**
|
||||
* API Client for interfacing with the Anthropic API.
|
||||
*/
|
||||
export class Anthropic extends BaseAnthropic {
|
||||
constructor() {
|
||||
super(...arguments);
|
||||
this.completions = new API.Completions(this);
|
||||
this.messages = new API.Messages(this);
|
||||
this.models = new API.Models(this);
|
||||
this.beta = new API.Beta(this);
|
||||
}
|
||||
}
|
||||
Anthropic.Completions = Completions;
|
||||
Anthropic.Messages = Messages;
|
||||
Anthropic.Models = Models;
|
||||
Anthropic.Beta = Beta;
|
||||
//# sourceMappingURL=client.mjs.map
|
||||
72
extracted-source/node_modules/@anthropic-ai/sdk/core/api-promise.mjs
generated
vendored
Normal file
72
extracted-source/node_modules/@anthropic-ai/sdk/core/api-promise.mjs
generated
vendored
Normal file
@@ -0,0 +1,72 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
var _APIPromise_client;
|
||||
import { __classPrivateFieldGet, __classPrivateFieldSet } from "../internal/tslib.mjs";
|
||||
import { defaultParseResponse, addRequestID, } from "../internal/parse.mjs";
|
||||
/**
|
||||
* A subclass of `Promise` providing additional helper methods
|
||||
* for interacting with the SDK.
|
||||
*/
|
||||
export class APIPromise extends Promise {
|
||||
constructor(client, responsePromise, parseResponse = defaultParseResponse) {
|
||||
super((resolve) => {
|
||||
// this is maybe a bit weird but this has to be a no-op to not implicitly
|
||||
// parse the response body; instead .then, .catch, .finally are overridden
|
||||
// to parse the response
|
||||
resolve(null);
|
||||
});
|
||||
this.responsePromise = responsePromise;
|
||||
this.parseResponse = parseResponse;
|
||||
_APIPromise_client.set(this, void 0);
|
||||
__classPrivateFieldSet(this, _APIPromise_client, client, "f");
|
||||
}
|
||||
_thenUnwrap(transform) {
|
||||
return new APIPromise(__classPrivateFieldGet(this, _APIPromise_client, "f"), this.responsePromise, async (client, props) => addRequestID(transform(await this.parseResponse(client, props), props), props.response));
|
||||
}
|
||||
/**
|
||||
* Gets the raw `Response` instance instead of parsing the response
|
||||
* data.
|
||||
*
|
||||
* If you want to parse the response body but still get the `Response`
|
||||
* instance, you can use {@link withResponse()}.
|
||||
*
|
||||
* 👋 Getting the wrong TypeScript type for `Response`?
|
||||
* Try setting `"moduleResolution": "NodeNext"` or add `"lib": ["DOM"]`
|
||||
* to your `tsconfig.json`.
|
||||
*/
|
||||
asResponse() {
|
||||
return this.responsePromise.then((p) => p.response);
|
||||
}
|
||||
/**
|
||||
* Gets the parsed response data, the raw `Response` instance and the ID of the request,
|
||||
* returned via the `request-id` header which is useful for debugging requests and resporting
|
||||
* issues to Anthropic.
|
||||
*
|
||||
* If you just want to get the raw `Response` instance without parsing it,
|
||||
* you can use {@link asResponse()}.
|
||||
*
|
||||
* 👋 Getting the wrong TypeScript type for `Response`?
|
||||
* Try setting `"moduleResolution": "NodeNext"` or add `"lib": ["DOM"]`
|
||||
* to your `tsconfig.json`.
|
||||
*/
|
||||
async withResponse() {
|
||||
const [data, response] = await Promise.all([this.parse(), this.asResponse()]);
|
||||
return { data, response, request_id: response.headers.get('request-id') };
|
||||
}
|
||||
parse() {
|
||||
if (!this.parsedPromise) {
|
||||
this.parsedPromise = this.responsePromise.then((data) => this.parseResponse(__classPrivateFieldGet(this, _APIPromise_client, "f"), data));
|
||||
}
|
||||
return this.parsedPromise;
|
||||
}
|
||||
then(onfulfilled, onrejected) {
|
||||
return this.parse().then(onfulfilled, onrejected);
|
||||
}
|
||||
catch(onrejected) {
|
||||
return this.parse().catch(onrejected);
|
||||
}
|
||||
finally(onfinally) {
|
||||
return this.parse().finally(onfinally);
|
||||
}
|
||||
}
|
||||
_APIPromise_client = new WeakMap();
|
||||
//# sourceMappingURL=api-promise.mjs.map
|
||||
98
extracted-source/node_modules/@anthropic-ai/sdk/core/error.mjs
generated
vendored
Normal file
98
extracted-source/node_modules/@anthropic-ai/sdk/core/error.mjs
generated
vendored
Normal file
@@ -0,0 +1,98 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { castToError } from "../internal/errors.mjs";
|
||||
export class AnthropicError extends Error {
|
||||
}
|
||||
export class APIError extends AnthropicError {
|
||||
constructor(status, error, message, headers) {
|
||||
super(`${APIError.makeMessage(status, error, message)}`);
|
||||
this.status = status;
|
||||
this.headers = headers;
|
||||
this.requestID = headers?.get('request-id');
|
||||
this.error = error;
|
||||
}
|
||||
static makeMessage(status, error, message) {
|
||||
const msg = error?.message ?
|
||||
typeof error.message === 'string' ?
|
||||
error.message
|
||||
: JSON.stringify(error.message)
|
||||
: error ? JSON.stringify(error)
|
||||
: message;
|
||||
if (status && msg) {
|
||||
return `${status} ${msg}`;
|
||||
}
|
||||
if (status) {
|
||||
return `${status} status code (no body)`;
|
||||
}
|
||||
if (msg) {
|
||||
return msg;
|
||||
}
|
||||
return '(no status code or body)';
|
||||
}
|
||||
static generate(status, errorResponse, message, headers) {
|
||||
if (!status || !headers) {
|
||||
return new APIConnectionError({ message, cause: castToError(errorResponse) });
|
||||
}
|
||||
const error = errorResponse;
|
||||
if (status === 400) {
|
||||
return new BadRequestError(status, error, message, headers);
|
||||
}
|
||||
if (status === 401) {
|
||||
return new AuthenticationError(status, error, message, headers);
|
||||
}
|
||||
if (status === 403) {
|
||||
return new PermissionDeniedError(status, error, message, headers);
|
||||
}
|
||||
if (status === 404) {
|
||||
return new NotFoundError(status, error, message, headers);
|
||||
}
|
||||
if (status === 409) {
|
||||
return new ConflictError(status, error, message, headers);
|
||||
}
|
||||
if (status === 422) {
|
||||
return new UnprocessableEntityError(status, error, message, headers);
|
||||
}
|
||||
if (status === 429) {
|
||||
return new RateLimitError(status, error, message, headers);
|
||||
}
|
||||
if (status >= 500) {
|
||||
return new InternalServerError(status, error, message, headers);
|
||||
}
|
||||
return new APIError(status, error, message, headers);
|
||||
}
|
||||
}
|
||||
export class APIUserAbortError extends APIError {
|
||||
constructor({ message } = {}) {
|
||||
super(undefined, undefined, message || 'Request was aborted.', undefined);
|
||||
}
|
||||
}
|
||||
export class APIConnectionError extends APIError {
|
||||
constructor({ message, cause }) {
|
||||
super(undefined, undefined, message || 'Connection error.', undefined);
|
||||
// in some environments the 'cause' property is already declared
|
||||
// @ts-ignore
|
||||
if (cause)
|
||||
this.cause = cause;
|
||||
}
|
||||
}
|
||||
export class APIConnectionTimeoutError extends APIConnectionError {
|
||||
constructor({ message } = {}) {
|
||||
super({ message: message ?? 'Request timed out.' });
|
||||
}
|
||||
}
|
||||
export class BadRequestError extends APIError {
|
||||
}
|
||||
export class AuthenticationError extends APIError {
|
||||
}
|
||||
export class PermissionDeniedError extends APIError {
|
||||
}
|
||||
export class NotFoundError extends APIError {
|
||||
}
|
||||
export class ConflictError extends APIError {
|
||||
}
|
||||
export class UnprocessableEntityError extends APIError {
|
||||
}
|
||||
export class RateLimitError extends APIError {
|
||||
}
|
||||
export class InternalServerError extends APIError {
|
||||
}
|
||||
//# sourceMappingURL=error.mjs.map
|
||||
177
extracted-source/node_modules/@anthropic-ai/sdk/core/pagination.mjs
generated
vendored
Normal file
177
extracted-source/node_modules/@anthropic-ai/sdk/core/pagination.mjs
generated
vendored
Normal file
@@ -0,0 +1,177 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
var _AbstractPage_client;
|
||||
import { __classPrivateFieldGet, __classPrivateFieldSet } from "../internal/tslib.mjs";
|
||||
import { AnthropicError } from "./error.mjs";
|
||||
import { defaultParseResponse } from "../internal/parse.mjs";
|
||||
import { APIPromise } from "./api-promise.mjs";
|
||||
import { maybeObj } from "../internal/utils/values.mjs";
|
||||
export class AbstractPage {
|
||||
constructor(client, response, body, options) {
|
||||
_AbstractPage_client.set(this, void 0);
|
||||
__classPrivateFieldSet(this, _AbstractPage_client, client, "f");
|
||||
this.options = options;
|
||||
this.response = response;
|
||||
this.body = body;
|
||||
}
|
||||
hasNextPage() {
|
||||
const items = this.getPaginatedItems();
|
||||
if (!items.length)
|
||||
return false;
|
||||
return this.nextPageRequestOptions() != null;
|
||||
}
|
||||
async getNextPage() {
|
||||
const nextOptions = this.nextPageRequestOptions();
|
||||
if (!nextOptions) {
|
||||
throw new AnthropicError('No next page expected; please check `.hasNextPage()` before calling `.getNextPage()`.');
|
||||
}
|
||||
return await __classPrivateFieldGet(this, _AbstractPage_client, "f").requestAPIList(this.constructor, nextOptions);
|
||||
}
|
||||
async *iterPages() {
|
||||
let page = this;
|
||||
yield page;
|
||||
while (page.hasNextPage()) {
|
||||
page = await page.getNextPage();
|
||||
yield page;
|
||||
}
|
||||
}
|
||||
async *[(_AbstractPage_client = new WeakMap(), Symbol.asyncIterator)]() {
|
||||
for await (const page of this.iterPages()) {
|
||||
for (const item of page.getPaginatedItems()) {
|
||||
yield item;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* This subclass of Promise will resolve to an instantiated Page once the request completes.
|
||||
*
|
||||
* It also implements AsyncIterable to allow auto-paginating iteration on an unawaited list call, eg:
|
||||
*
|
||||
* for await (const item of client.items.list()) {
|
||||
* console.log(item)
|
||||
* }
|
||||
*/
|
||||
export class PagePromise extends APIPromise {
|
||||
constructor(client, request, Page) {
|
||||
super(client, request, async (client, props) => new Page(client, props.response, await defaultParseResponse(client, props), props.options));
|
||||
}
|
||||
/**
|
||||
* Allow auto-paginating iteration on an unawaited list call, eg:
|
||||
*
|
||||
* for await (const item of client.items.list()) {
|
||||
* console.log(item)
|
||||
* }
|
||||
*/
|
||||
async *[Symbol.asyncIterator]() {
|
||||
const page = await this;
|
||||
for await (const item of page) {
|
||||
yield item;
|
||||
}
|
||||
}
|
||||
}
|
||||
export class Page extends AbstractPage {
|
||||
constructor(client, response, body, options) {
|
||||
super(client, response, body, options);
|
||||
this.data = body.data || [];
|
||||
this.has_more = body.has_more || false;
|
||||
this.first_id = body.first_id || null;
|
||||
this.last_id = body.last_id || null;
|
||||
}
|
||||
getPaginatedItems() {
|
||||
return this.data ?? [];
|
||||
}
|
||||
hasNextPage() {
|
||||
if (this.has_more === false) {
|
||||
return false;
|
||||
}
|
||||
return super.hasNextPage();
|
||||
}
|
||||
nextPageRequestOptions() {
|
||||
if (this.options.query?.['before_id']) {
|
||||
// in reverse
|
||||
const first_id = this.first_id;
|
||||
if (!first_id) {
|
||||
return null;
|
||||
}
|
||||
return {
|
||||
...this.options,
|
||||
query: {
|
||||
...maybeObj(this.options.query),
|
||||
before_id: first_id,
|
||||
},
|
||||
};
|
||||
}
|
||||
const cursor = this.last_id;
|
||||
if (!cursor) {
|
||||
return null;
|
||||
}
|
||||
return {
|
||||
...this.options,
|
||||
query: {
|
||||
...maybeObj(this.options.query),
|
||||
after_id: cursor,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
export class TokenPage extends AbstractPage {
|
||||
constructor(client, response, body, options) {
|
||||
super(client, response, body, options);
|
||||
this.data = body.data || [];
|
||||
this.has_more = body.has_more || false;
|
||||
this.next_page = body.next_page || null;
|
||||
}
|
||||
getPaginatedItems() {
|
||||
return this.data ?? [];
|
||||
}
|
||||
hasNextPage() {
|
||||
if (this.has_more === false) {
|
||||
return false;
|
||||
}
|
||||
return super.hasNextPage();
|
||||
}
|
||||
nextPageRequestOptions() {
|
||||
const cursor = this.next_page;
|
||||
if (!cursor) {
|
||||
return null;
|
||||
}
|
||||
return {
|
||||
...this.options,
|
||||
query: {
|
||||
...maybeObj(this.options.query),
|
||||
page_token: cursor,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
export class PageCursor extends AbstractPage {
|
||||
constructor(client, response, body, options) {
|
||||
super(client, response, body, options);
|
||||
this.data = body.data || [];
|
||||
this.has_more = body.has_more || false;
|
||||
this.next_page = body.next_page || null;
|
||||
}
|
||||
getPaginatedItems() {
|
||||
return this.data ?? [];
|
||||
}
|
||||
hasNextPage() {
|
||||
if (this.has_more === false) {
|
||||
return false;
|
||||
}
|
||||
return super.hasNextPage();
|
||||
}
|
||||
nextPageRequestOptions() {
|
||||
const cursor = this.next_page;
|
||||
if (!cursor) {
|
||||
return null;
|
||||
}
|
||||
return {
|
||||
...this.options,
|
||||
query: {
|
||||
...maybeObj(this.options.query),
|
||||
page: cursor,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=pagination.mjs.map
|
||||
7
extracted-source/node_modules/@anthropic-ai/sdk/core/resource.mjs
generated
vendored
Normal file
7
extracted-source/node_modules/@anthropic-ai/sdk/core/resource.mjs
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
export class APIResource {
|
||||
constructor(client) {
|
||||
this._client = client;
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=resource.mjs.map
|
||||
283
extracted-source/node_modules/@anthropic-ai/sdk/core/streaming.mjs
generated
vendored
Normal file
283
extracted-source/node_modules/@anthropic-ai/sdk/core/streaming.mjs
generated
vendored
Normal file
@@ -0,0 +1,283 @@
|
||||
var _Stream_client;
|
||||
import { __classPrivateFieldGet, __classPrivateFieldSet } from "../internal/tslib.mjs";
|
||||
import { AnthropicError } from "./error.mjs";
|
||||
import { makeReadableStream } from "../internal/shims.mjs";
|
||||
import { findDoubleNewlineIndex, LineDecoder } from "../internal/decoders/line.mjs";
|
||||
import { ReadableStreamToAsyncIterable } from "../internal/shims.mjs";
|
||||
import { isAbortError } from "../internal/errors.mjs";
|
||||
import { safeJSON } from "../internal/utils/values.mjs";
|
||||
import { encodeUTF8 } from "../internal/utils/bytes.mjs";
|
||||
import { loggerFor } from "../internal/utils/log.mjs";
|
||||
import { APIError } from "./error.mjs";
|
||||
export class Stream {
|
||||
constructor(iterator, controller, client) {
|
||||
this.iterator = iterator;
|
||||
_Stream_client.set(this, void 0);
|
||||
this.controller = controller;
|
||||
__classPrivateFieldSet(this, _Stream_client, client, "f");
|
||||
}
|
||||
static fromSSEResponse(response, controller, client) {
|
||||
let consumed = false;
|
||||
const logger = client ? loggerFor(client) : console;
|
||||
async function* iterator() {
|
||||
if (consumed) {
|
||||
throw new AnthropicError('Cannot iterate over a consumed stream, use `.tee()` to split the stream.');
|
||||
}
|
||||
consumed = true;
|
||||
let done = false;
|
||||
try {
|
||||
for await (const sse of _iterSSEMessages(response, controller)) {
|
||||
if (sse.event === 'completion') {
|
||||
try {
|
||||
yield JSON.parse(sse.data);
|
||||
}
|
||||
catch (e) {
|
||||
logger.error(`Could not parse message into JSON:`, sse.data);
|
||||
logger.error(`From chunk:`, sse.raw);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
if (sse.event === 'message_start' ||
|
||||
sse.event === 'message_delta' ||
|
||||
sse.event === 'message_stop' ||
|
||||
sse.event === 'content_block_start' ||
|
||||
sse.event === 'content_block_delta' ||
|
||||
sse.event === 'content_block_stop') {
|
||||
try {
|
||||
yield JSON.parse(sse.data);
|
||||
}
|
||||
catch (e) {
|
||||
logger.error(`Could not parse message into JSON:`, sse.data);
|
||||
logger.error(`From chunk:`, sse.raw);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
if (sse.event === 'ping') {
|
||||
continue;
|
||||
}
|
||||
if (sse.event === 'error') {
|
||||
throw new APIError(undefined, safeJSON(sse.data) ?? sse.data, undefined, response.headers);
|
||||
}
|
||||
}
|
||||
done = true;
|
||||
}
|
||||
catch (e) {
|
||||
// If the user calls `stream.controller.abort()`, we should exit without throwing.
|
||||
if (isAbortError(e))
|
||||
return;
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
// If the user `break`s, abort the ongoing request.
|
||||
if (!done)
|
||||
controller.abort();
|
||||
}
|
||||
}
|
||||
return new Stream(iterator, controller, client);
|
||||
}
|
||||
/**
|
||||
* Generates a Stream from a newline-separated ReadableStream
|
||||
* where each item is a JSON value.
|
||||
*/
|
||||
static fromReadableStream(readableStream, controller, client) {
|
||||
let consumed = false;
|
||||
async function* iterLines() {
|
||||
const lineDecoder = new LineDecoder();
|
||||
const iter = ReadableStreamToAsyncIterable(readableStream);
|
||||
for await (const chunk of iter) {
|
||||
for (const line of lineDecoder.decode(chunk)) {
|
||||
yield line;
|
||||
}
|
||||
}
|
||||
for (const line of lineDecoder.flush()) {
|
||||
yield line;
|
||||
}
|
||||
}
|
||||
async function* iterator() {
|
||||
if (consumed) {
|
||||
throw new AnthropicError('Cannot iterate over a consumed stream, use `.tee()` to split the stream.');
|
||||
}
|
||||
consumed = true;
|
||||
let done = false;
|
||||
try {
|
||||
for await (const line of iterLines()) {
|
||||
if (done)
|
||||
continue;
|
||||
if (line)
|
||||
yield JSON.parse(line);
|
||||
}
|
||||
done = true;
|
||||
}
|
||||
catch (e) {
|
||||
// If the user calls `stream.controller.abort()`, we should exit without throwing.
|
||||
if (isAbortError(e))
|
||||
return;
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
// If the user `break`s, abort the ongoing request.
|
||||
if (!done)
|
||||
controller.abort();
|
||||
}
|
||||
}
|
||||
return new Stream(iterator, controller, client);
|
||||
}
|
||||
[(_Stream_client = new WeakMap(), Symbol.asyncIterator)]() {
|
||||
return this.iterator();
|
||||
}
|
||||
/**
|
||||
* Splits the stream into two streams which can be
|
||||
* independently read from at different speeds.
|
||||
*/
|
||||
tee() {
|
||||
const left = [];
|
||||
const right = [];
|
||||
const iterator = this.iterator();
|
||||
const teeIterator = (queue) => {
|
||||
return {
|
||||
next: () => {
|
||||
if (queue.length === 0) {
|
||||
const result = iterator.next();
|
||||
left.push(result);
|
||||
right.push(result);
|
||||
}
|
||||
return queue.shift();
|
||||
},
|
||||
};
|
||||
};
|
||||
return [
|
||||
new Stream(() => teeIterator(left), this.controller, __classPrivateFieldGet(this, _Stream_client, "f")),
|
||||
new Stream(() => teeIterator(right), this.controller, __classPrivateFieldGet(this, _Stream_client, "f")),
|
||||
];
|
||||
}
|
||||
/**
|
||||
* Converts this stream to a newline-separated ReadableStream of
|
||||
* JSON stringified values in the stream
|
||||
* which can be turned back into a Stream with `Stream.fromReadableStream()`.
|
||||
*/
|
||||
toReadableStream() {
|
||||
const self = this;
|
||||
let iter;
|
||||
return makeReadableStream({
|
||||
async start() {
|
||||
iter = self[Symbol.asyncIterator]();
|
||||
},
|
||||
async pull(ctrl) {
|
||||
try {
|
||||
const { value, done } = await iter.next();
|
||||
if (done)
|
||||
return ctrl.close();
|
||||
const bytes = encodeUTF8(JSON.stringify(value) + '\n');
|
||||
ctrl.enqueue(bytes);
|
||||
}
|
||||
catch (err) {
|
||||
ctrl.error(err);
|
||||
}
|
||||
},
|
||||
async cancel() {
|
||||
await iter.return?.();
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
export async function* _iterSSEMessages(response, controller) {
|
||||
if (!response.body) {
|
||||
controller.abort();
|
||||
if (typeof globalThis.navigator !== 'undefined' &&
|
||||
globalThis.navigator.product === 'ReactNative') {
|
||||
throw new AnthropicError(`The default react-native fetch implementation does not support streaming. Please use expo/fetch: https://docs.expo.dev/versions/latest/sdk/expo/#expofetch-api`);
|
||||
}
|
||||
throw new AnthropicError(`Attempted to iterate over a response with no body`);
|
||||
}
|
||||
const sseDecoder = new SSEDecoder();
|
||||
const lineDecoder = new LineDecoder();
|
||||
const iter = ReadableStreamToAsyncIterable(response.body);
|
||||
for await (const sseChunk of iterSSEChunks(iter)) {
|
||||
for (const line of lineDecoder.decode(sseChunk)) {
|
||||
const sse = sseDecoder.decode(line);
|
||||
if (sse)
|
||||
yield sse;
|
||||
}
|
||||
}
|
||||
for (const line of lineDecoder.flush()) {
|
||||
const sse = sseDecoder.decode(line);
|
||||
if (sse)
|
||||
yield sse;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Given an async iterable iterator, iterates over it and yields full
|
||||
* SSE chunks, i.e. yields when a double new-line is encountered.
|
||||
*/
|
||||
async function* iterSSEChunks(iterator) {
|
||||
let data = new Uint8Array();
|
||||
for await (const chunk of iterator) {
|
||||
if (chunk == null) {
|
||||
continue;
|
||||
}
|
||||
const binaryChunk = chunk instanceof ArrayBuffer ? new Uint8Array(chunk)
|
||||
: typeof chunk === 'string' ? encodeUTF8(chunk)
|
||||
: chunk;
|
||||
let newData = new Uint8Array(data.length + binaryChunk.length);
|
||||
newData.set(data);
|
||||
newData.set(binaryChunk, data.length);
|
||||
data = newData;
|
||||
let patternIndex;
|
||||
while ((patternIndex = findDoubleNewlineIndex(data)) !== -1) {
|
||||
yield data.slice(0, patternIndex);
|
||||
data = data.slice(patternIndex);
|
||||
}
|
||||
}
|
||||
if (data.length > 0) {
|
||||
yield data;
|
||||
}
|
||||
}
|
||||
class SSEDecoder {
|
||||
constructor() {
|
||||
this.event = null;
|
||||
this.data = [];
|
||||
this.chunks = [];
|
||||
}
|
||||
decode(line) {
|
||||
if (line.endsWith('\r')) {
|
||||
line = line.substring(0, line.length - 1);
|
||||
}
|
||||
if (!line) {
|
||||
// empty line and we didn't previously encounter any messages
|
||||
if (!this.event && !this.data.length)
|
||||
return null;
|
||||
const sse = {
|
||||
event: this.event,
|
||||
data: this.data.join('\n'),
|
||||
raw: this.chunks,
|
||||
};
|
||||
this.event = null;
|
||||
this.data = [];
|
||||
this.chunks = [];
|
||||
return sse;
|
||||
}
|
||||
this.chunks.push(line);
|
||||
if (line.startsWith(':')) {
|
||||
return null;
|
||||
}
|
||||
let [fieldname, _, value] = partition(line, ':');
|
||||
if (value.startsWith(' ')) {
|
||||
value = value.substring(1);
|
||||
}
|
||||
if (fieldname === 'event') {
|
||||
this.event = value;
|
||||
}
|
||||
else if (fieldname === 'data') {
|
||||
this.data.push(value);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
function partition(str, delimiter) {
|
||||
const index = str.indexOf(delimiter);
|
||||
if (index !== -1) {
|
||||
return [str.substring(0, index), delimiter, str.substring(index + delimiter.length)];
|
||||
}
|
||||
return [str, '', ''];
|
||||
}
|
||||
//# sourceMappingURL=streaming.mjs.map
|
||||
2
extracted-source/node_modules/@anthropic-ai/sdk/core/uploads.mjs
generated
vendored
Normal file
2
extracted-source/node_modules/@anthropic-ai/sdk/core/uploads.mjs
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
export { toFile } from "../internal/to-file.mjs";
|
||||
//# sourceMappingURL=uploads.mjs.map
|
||||
2
extracted-source/node_modules/@anthropic-ai/sdk/error.mjs
generated
vendored
Normal file
2
extracted-source/node_modules/@anthropic-ai/sdk/error.mjs
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
export * from "./core/error.mjs";
|
||||
//# sourceMappingURL=error.mjs.map
|
||||
8
extracted-source/node_modules/@anthropic-ai/sdk/index.mjs
generated
vendored
Normal file
8
extracted-source/node_modules/@anthropic-ai/sdk/index.mjs
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
export { Anthropic as default } from "./client.mjs";
|
||||
export { toFile } from "./core/uploads.mjs";
|
||||
export { APIPromise } from "./core/api-promise.mjs";
|
||||
export { BaseAnthropic, Anthropic, HUMAN_PROMPT, AI_PROMPT } from "./client.mjs";
|
||||
export { PagePromise } from "./core/pagination.mjs";
|
||||
export { AnthropicError, APIError, APIConnectionError, APIConnectionTimeoutError, APIUserAbortError, NotFoundError, ConflictError, RateLimitError, BadRequestError, AuthenticationError, InternalServerError, PermissionDeniedError, UnprocessableEntityError, } from "./core/error.mjs";
|
||||
//# sourceMappingURL=index.mjs.map
|
||||
15
extracted-source/node_modules/@anthropic-ai/sdk/internal/constants.mjs
generated
vendored
Normal file
15
extracted-source/node_modules/@anthropic-ai/sdk/internal/constants.mjs
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
// File containing shared constants
/**
 * Model-specific timeout constraints for non-streaming requests
 */
// NOTE(review): values look like token thresholds used to constrain
// non-streaming requests for Opus 4 models — confirm against callers.
// Key variants appear to cover several naming schemes: plain API ids,
// 'anthropic.…-v1:0' (Bedrock-style) and '…@…' (Vertex-style) — verify.
export const MODEL_NONSTREAMING_TOKENS = {
    'claude-opus-4-20250514': 8192,
    'claude-opus-4-0': 8192,
    'claude-4-opus-20250514': 8192,
    'anthropic.claude-opus-4-20250514-v1:0': 8192,
    'claude-opus-4@20250514': 8192,
    'claude-opus-4-1-20250805': 8192,
    'anthropic.claude-opus-4-1-20250805-v1:0': 8192,
    'claude-opus-4-1@20250805': 8192,
};
|
||||
//# sourceMappingURL=constants.mjs.map
|
||||
35
extracted-source/node_modules/@anthropic-ai/sdk/internal/decoders/jsonl.mjs
generated
vendored
Normal file
35
extracted-source/node_modules/@anthropic-ai/sdk/internal/decoders/jsonl.mjs
generated
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
import { AnthropicError } from "../../core/error.mjs";
|
||||
import { ReadableStreamToAsyncIterable } from "../shims.mjs";
|
||||
import { LineDecoder } from "./line.mjs";
|
||||
export class JSONLDecoder {
    /**
     * Wraps a byte/text async iterator and yields one JSON.parse'd value per
     * line (JSON Lines framing).
     */
    constructor(iterator, controller) {
        this.iterator = iterator;
        this.controller = controller;
    }
    async *decoder() {
        const lines = new LineDecoder();
        for await (const chunk of this.iterator) {
            for (const row of lines.decode(chunk)) {
                yield JSON.parse(row);
            }
        }
        // Emit whatever remains buffered once the stream ends.
        for (const row of lines.flush()) {
            yield JSON.parse(row);
        }
    }
    [Symbol.asyncIterator]() {
        return this.decoder();
    }
    static fromResponse(response, controller) {
        if (response.body) {
            return new JSONLDecoder(ReadableStreamToAsyncIterable(response.body), controller);
        }
        // No body: tear the request down before reporting the failure.
        controller.abort();
        const isReactNative = typeof globalThis.navigator !== 'undefined' &&
            globalThis.navigator.product === 'ReactNative';
        if (isReactNative) {
            throw new AnthropicError(`The default react-native fetch implementation does not support streaming. Please use expo/fetch: https://docs.expo.dev/versions/latest/sdk/expo/#expofetch-api`);
        }
        throw new AnthropicError(`Attempted to iterate over a response with no body`);
    }
}
|
||||
//# sourceMappingURL=jsonl.mjs.map
|
||||
108
extracted-source/node_modules/@anthropic-ai/sdk/internal/decoders/line.mjs
generated
vendored
Normal file
108
extracted-source/node_modules/@anthropic-ai/sdk/internal/decoders/line.mjs
generated
vendored
Normal file
@@ -0,0 +1,108 @@
|
||||
var _LineDecoder_buffer, _LineDecoder_carriageReturnIndex;
|
||||
import { __classPrivateFieldGet, __classPrivateFieldSet } from "../tslib.mjs";
|
||||
import { concatBytes, decodeUTF8, encodeUTF8 } from "../utils/bytes.mjs";
|
||||
/**
 * A re-implementation of httpx's `LineDecoder` in Python that handles incrementally
 * reading lines from text.
 *
 * https://github.com/encode/httpx/blob/920333ea98118e9cf617f246905d7b202510941c/httpx/_decoders.py#L258
 */
export class LineDecoder {
    constructor() {
        // Down-leveled private fields (#buffer, #carriageReturnIndex) backed
        // by module-level WeakMaps via the tslib helpers.
        _LineDecoder_buffer.set(this, void 0);
        _LineDecoder_carriageReturnIndex.set(this, void 0);
        // #buffer: bytes received but not yet emitted as complete lines.
        __classPrivateFieldSet(this, _LineDecoder_buffer, new Uint8Array(), "f");
        // #carriageReturnIndex: position just past a pending '\r' whose fate
        // (lone CR vs. CRLF pair) is not yet known; null when none pending.
        __classPrivateFieldSet(this, _LineDecoder_carriageReturnIndex, null, "f");
    }
    // Append `chunk` (string, ArrayBuffer or byte view; null/undefined is a
    // no-op) and return every complete line now available.
    decode(chunk) {
        if (chunk == null) {
            return [];
        }
        const binaryChunk = chunk instanceof ArrayBuffer ? new Uint8Array(chunk)
            : typeof chunk === 'string' ? encodeUTF8(chunk)
                : chunk;
        __classPrivateFieldSet(this, _LineDecoder_buffer, concatBytes([__classPrivateFieldGet(this, _LineDecoder_buffer, "f"), binaryChunk]), "f");
        const lines = [];
        let patternIndex;
        // Repeatedly locate the next \n or \r, resuming the scan at the
        // pending CR position (if any) so bytes are not re-examined.
        while ((patternIndex = findNewlineIndex(__classPrivateFieldGet(this, _LineDecoder_buffer, "f"), __classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f"))) != null) {
            if (patternIndex.carriage && __classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f") == null) {
                // skip until we either get a corresponding `\n`, a new `\r` or nothing
                __classPrivateFieldSet(this, _LineDecoder_carriageReturnIndex, patternIndex.index, "f");
                continue;
            }
            // we got double \r or \rtext\n
            if (__classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f") != null &&
                (patternIndex.index !== __classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f") + 1 || patternIndex.carriage)) {
                // Emit the line terminated by the earlier lone '\r' (exclude
                // the CR byte itself), keep the rest buffered, and rescan.
                lines.push(decodeUTF8(__classPrivateFieldGet(this, _LineDecoder_buffer, "f").subarray(0, __classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f") - 1)));
                __classPrivateFieldSet(this, _LineDecoder_buffer, __classPrivateFieldGet(this, _LineDecoder_buffer, "f").subarray(__classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f")), "f");
                __classPrivateFieldSet(this, _LineDecoder_carriageReturnIndex, null, "f");
                continue;
            }
            // Normal case: \n, or the \n completing a CRLF pair (then drop
            // the '\r' by ending the slice one byte earlier).
            const endIndex = __classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f") !== null ? patternIndex.preceding - 1 : patternIndex.preceding;
            const line = decodeUTF8(__classPrivateFieldGet(this, _LineDecoder_buffer, "f").subarray(0, endIndex));
            lines.push(line);
            __classPrivateFieldSet(this, _LineDecoder_buffer, __classPrivateFieldGet(this, _LineDecoder_buffer, "f").subarray(patternIndex.index), "f");
            __classPrivateFieldSet(this, _LineDecoder_carriageReturnIndex, null, "f");
        }
        return lines;
    }
    // Drain the buffer as one final line (by feeding a synthetic '\n').
    flush() {
        if (!__classPrivateFieldGet(this, _LineDecoder_buffer, "f").length) {
            return [];
        }
        return this.decode('\n');
    }
}
|
||||
// Backing WeakMap storage for LineDecoder's down-leveled #buffer and
// #carriageReturnIndex private fields (see the tslib helpers).
_LineDecoder_buffer = new WeakMap(), _LineDecoder_carriageReturnIndex = new WeakMap();
// prettier-ignore
LineDecoder.NEWLINE_CHARS = new Set(['\n', '\r']);
LineDecoder.NEWLINE_REGEXP = /\r\n|[\n\r]/g;
|
||||
/**
 * This function searches the buffer for the end patterns, (\r or \n)
 * and returns an object with the index preceding the matched newline and the
 * index after the newline char. `null` is returned if no new line is found.
 *
 * ```ts
 * findNewLineIndex('abc\ndef') -> { preceding: 2, index: 3 }
 * ```
 */
function findNewlineIndex(buffer, startIndex) {
    const LF = 0x0a; // \n
    const CR = 0x0d; // \r
    // Resume scanning at startIndex when provided (nullish means start at 0).
    for (let i = startIndex ?? 0; i < buffer.length; i++) {
        const byte = buffer[i];
        if (byte === LF || byte === CR) {
            return { preceding: i, index: i + 1, carriage: byte === CR };
        }
    }
    return null;
}
|
||||
export function findDoubleNewlineIndex(buffer) {
    // Locate the first double-newline separator (\n\n, \r\r or \r\n\r\n) and
    // return the index just past it, or -1 when the buffer contains none.
    const LF = 0x0a; // \n
    const CR = 0x0d; // \r
    for (let i = 0; i + 1 < buffer.length; i++) {
        const a = buffer[i];
        const b = buffer[i + 1];
        if (a === LF && b === LF) {
            // \n\n
            return i + 2;
        }
        if (a === CR && b === CR) {
            // \r\r
            return i + 2;
        }
        if (a === CR && b === LF && i + 3 < buffer.length && buffer[i + 2] === CR && buffer[i + 3] === LF) {
            // \r\n\r\n
            return i + 4;
        }
    }
    return -1;
}
|
||||
//# sourceMappingURL=line.mjs.map
|
||||
157
extracted-source/node_modules/@anthropic-ai/sdk/internal/detect-platform.mjs
generated
vendored
Normal file
157
extracted-source/node_modules/@anthropic-ai/sdk/internal/detect-platform.mjs
generated
vendored
Normal file
@@ -0,0 +1,157 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { VERSION } from "../version.mjs";
|
||||
export const isRunningInBrowser = () => {
    // A browser exposes `window`, a DOM document and `navigator`; require all
    // three before reporting a browser environment.
    // @ts-ignore
    if (typeof window === 'undefined') {
        return false;
    }
    // @ts-ignore
    if (typeof window.document === 'undefined') {
        return false;
    }
    // @ts-ignore
    return typeof navigator !== 'undefined';
};
|
||||
/**
 * Note this does not detect 'browser'; for that, use getBrowserInfo().
 */
function getDetectedPlatform() {
    if (typeof Deno !== 'undefined' && Deno.build != null) {
        return 'deno';
    }
    if (typeof EdgeRuntime !== 'undefined') {
        return 'edge';
    }
    // Node is recognised by its `process` object's internal [[Class]] tag,
    // which survives bundlers that merely stub `process` out.
    const maybeProcess = typeof globalThis.process !== 'undefined' ? globalThis.process : 0;
    return Object.prototype.toString.call(maybeProcess) === '[object process]' ? 'node' : 'unknown';
}
|
||||
const getPlatformProperties = () => {
    // Key/value pairs common to every platform variant; spread first so the
    // resulting property order matches across branches.
    const common = {
        'X-Stainless-Lang': 'js',
        'X-Stainless-Package-Version': VERSION,
    };
    const detectedPlatform = getDetectedPlatform();
    if (detectedPlatform === 'deno') {
        return {
            ...common,
            'X-Stainless-OS': normalizePlatform(Deno.build.os),
            'X-Stainless-Arch': normalizeArch(Deno.build.arch),
            'X-Stainless-Runtime': 'deno',
            'X-Stainless-Runtime-Version': typeof Deno.version === 'string' ? Deno.version : Deno.version?.deno ?? 'unknown',
        };
    }
    if (typeof EdgeRuntime !== 'undefined') {
        return {
            ...common,
            'X-Stainless-OS': 'Unknown',
            'X-Stainless-Arch': `other:${EdgeRuntime}`,
            'X-Stainless-Runtime': 'edge',
            'X-Stainless-Runtime-Version': globalThis.process.version,
        };
    }
    // Check if Node.js
    if (detectedPlatform === 'node') {
        return {
            ...common,
            'X-Stainless-OS': normalizePlatform(globalThis.process.platform ?? 'unknown'),
            'X-Stainless-Arch': normalizeArch(globalThis.process.arch ?? 'unknown'),
            'X-Stainless-Runtime': 'node',
            'X-Stainless-Runtime-Version': globalThis.process.version ?? 'unknown',
        };
    }
    const browserInfo = getBrowserInfo();
    if (browserInfo) {
        return {
            ...common,
            'X-Stainless-OS': 'Unknown',
            'X-Stainless-Arch': 'unknown',
            'X-Stainless-Runtime': `browser:${browserInfo.browser}`,
            'X-Stainless-Runtime-Version': browserInfo.version,
        };
    }
    // TODO add support for Cloudflare workers, etc.
    return {
        ...common,
        'X-Stainless-OS': 'Unknown',
        'X-Stainless-Arch': 'unknown',
        'X-Stainless-Runtime': 'unknown',
        'X-Stainless-Runtime-Version': 'unknown',
    };
};
|
||||
// Note: modified from https://github.com/JS-DevTools/host-environment/blob/b1ab79ecde37db5d6e163c050e54fe7d287d7c92/src/isomorphic.browser.ts
function getBrowserInfo() {
    if (typeof navigator === 'undefined' || !navigator) {
        return null;
    }
    // NOTE: The order matters here!
    const browserPatterns = [
        { key: 'edge', pattern: /Edge(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ },
        { key: 'ie', pattern: /MSIE(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ },
        { key: 'ie', pattern: /Trident(?:.*rv\:(\d+)\.(\d+)(?:\.(\d+))?)?/ },
        { key: 'chrome', pattern: /Chrome(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ },
        { key: 'firefox', pattern: /Firefox(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ },
        { key: 'safari', pattern: /(?:Version\W+(\d+)\.(\d+)(?:\.(\d+))?)?(?:\W+Mobile\S*)?\W+Safari/ },
    ];
    // Find the FIRST matching browser
    for (const { key, pattern } of browserPatterns) {
        const match = pattern.exec(navigator.userAgent);
        if (!match) {
            continue;
        }
        // Unmatched capture groups fall back to 0 so the version is well-formed.
        const [, major = 0, minor = 0, patch = 0] = match;
        return { browser: key, version: `${major}.${minor}.${patch}` };
    }
    return null;
}
|
||||
const normalizeArch = (arch) => {
    // Node docs:
    // - https://nodejs.org/api/process.html#processarch
    // Deno docs:
    // - https://doc.deno.land/deno/stable/~/Deno.build
    // Map Node/Deno architecture spellings onto the header vocabulary;
    // anything unrecognised is reported as `other:<arch>`.
    switch (arch) {
        case 'x32':
            return 'x32';
        case 'x86_64':
        case 'x64':
            return 'x64';
        case 'arm':
            return 'arm';
        case 'aarch64':
        case 'arm64':
            return 'arm64';
        default:
            return arch ? `other:${arch}` : 'unknown';
    }
};
|
||||
const normalizePlatform = (platform) => {
    // Node platforms:
    // - https://nodejs.org/api/process.html#processplatform
    // Deno platforms:
    // - https://doc.deno.land/deno/stable/~/Deno.build
    // - https://github.com/denoland/deno/issues/14799
    const normalized = platform.toLowerCase();
    // NOTE: this iOS check is untested and may not work
    // Node does not work natively on IOS, there is a fork at
    // https://github.com/nodejs-mobile/nodejs-mobile
    // however it is unknown at the time of writing how to detect if it is running
    if (normalized.includes('ios')) {
        return 'iOS';
    }
    // Map (not a plain object) so hostile keys like 'constructor' can't
    // accidentally hit Object.prototype.
    const friendlyNames = new Map([
        ['android', 'Android'],
        ['darwin', 'MacOS'],
        ['win32', 'Windows'],
        ['freebsd', 'FreeBSD'],
        ['openbsd', 'OpenBSD'],
        ['linux', 'Linux'],
    ]);
    const friendly = friendlyNames.get(normalized);
    if (friendly) {
        return friendly;
    }
    return normalized ? `Other:${normalized}` : 'Unknown';
};
|
||||
// Cache: the platform headers are constant for the lifetime of the process.
let _platformHeaders;
export const getPlatformHeaders = () => {
    // Compute once on first use, then reuse the memoised value.
    return (_platformHeaders ?? (_platformHeaders = getPlatformProperties()));
};
|
||||
//# sourceMappingURL=detect-platform.mjs.map
|
||||
36
extracted-source/node_modules/@anthropic-ai/sdk/internal/errors.mjs
generated
vendored
Normal file
36
extracted-source/node_modules/@anthropic-ai/sdk/internal/errors.mjs
generated
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
export function isAbortError(err) {
    // Only object-shaped errors can represent an aborted request.
    if (typeof err !== 'object' || err === null) {
        return false;
    }
    // Spec-compliant fetch implementations
    if ('name' in err && err.name === 'AbortError') {
        return true;
    }
    // Expo fetch
    return 'message' in err && String(err.message).includes('FetchRequestCanceledException');
}
|
||||
export const castToError = (err) => {
    // Real Error instances pass through untouched.
    if (err instanceof Error) {
        return err;
    }
    if (typeof err === 'object' && err !== null) {
        try {
            // Cross-realm Errors fail `instanceof` but still carry the
            // [[Class]] tag; rebuild a local Error preserving stack/cause/name.
            if (Object.prototype.toString.call(err) === '[object Error]') {
                // @ts-ignore - not all envs have native support for cause yet
                const rebuilt = new Error(err.message, err.cause ? { cause: err.cause } : {});
                if (err.stack) {
                    rebuilt.stack = err.stack;
                }
                // @ts-ignore - not all envs have native support for cause yet
                if (err.cause && !rebuilt.cause) {
                    rebuilt.cause = err.cause;
                }
                if (err.name) {
                    rebuilt.name = err.name;
                }
                return rebuilt;
            }
        }
        catch { }
        try {
            // Arbitrary objects: serialise them into the message (may throw
            // on cycles/BigInt, in which case we fall through).
            return new Error(JSON.stringify(err));
        }
        catch { }
    }
    // Primitives (and unserialisable objects) become the Error message.
    return new Error(err);
};
|
||||
//# sourceMappingURL=errors.mjs.map
|
||||
74
extracted-source/node_modules/@anthropic-ai/sdk/internal/headers.mjs
generated
vendored
Normal file
74
extracted-source/node_modules/@anthropic-ai/sdk/internal/headers.mjs
generated
vendored
Normal file
@@ -0,0 +1,74 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { isReadonlyArray } from "./utils/values.mjs";
|
||||
const brand_privateNullableHeaders = Symbol.for('brand.privateNullableHeaders');
|
||||
/**
 * Yields [name, value] pairs from any supported header representation: the
 * branded {values, nulls} object produced by buildHeaders, a `Headers`
 * instance, an array of entries, or a plain object. A `null` value signals
 * "clear this header".
 */
function* iterateHeaders(headers) {
    if (!headers)
        return;
    if (brand_privateNullableHeaders in headers) {
        // Already-normalised output of buildHeaders: replay values, then nulls.
        const { values, nulls } = headers;
        yield* values.entries();
        for (const name of nulls) {
            yield [name, null];
        }
        return;
    }
    let shouldClear = false;
    let iter;
    if (headers instanceof Headers) {
        iter = headers.entries();
    }
    else if (isReadonlyArray(headers)) {
        iter = headers;
    }
    else {
        // Plain-object form: keys overwrite rather than append (see below).
        shouldClear = true;
        iter = Object.entries(headers ?? {});
    }
    for (let row of iter) {
        const name = row[0];
        if (typeof name !== 'string')
            throw new TypeError('expected header name to be a string');
        // Normalise single values to one-element arrays.
        const values = isReadonlyArray(row[1]) ? row[1] : [row[1]];
        let didClear = false;
        for (const value of values) {
            if (value === undefined)
                continue;
            // Objects keys always overwrite older headers, they never append.
            // Yield a null to clear the header before adding the new values.
            if (shouldClear && !didClear) {
                didClear = true;
                yield [name, null];
            }
            yield [name, value];
        }
    }
}
|
||||
/**
 * Merge several header containers (later containers win) into the branded
 * {values, nulls} shape; `nulls` records names that were explicitly cleared
 * with a null value.
 */
export const buildHeaders = (newHeaders) => {
    const targetHeaders = new Headers();
    const nullHeaders = new Set();
    for (const headers of newHeaders) {
        const seenHeaders = new Set();
        for (const [name, value] of iterateHeaders(headers)) {
            const lowerName = name.toLowerCase();
            // First sighting of a name within this container replaces any
            // value accumulated from earlier containers; later sightings in
            // the same container append.
            if (!seenHeaders.has(lowerName)) {
                targetHeaders.delete(name);
                seenHeaders.add(lowerName);
            }
            if (value === null) {
                // Explicit null: drop the header and remember it was cleared.
                targetHeaders.delete(name);
                nullHeaders.add(lowerName);
            }
            else {
                targetHeaders.append(name, value);
                nullHeaders.delete(lowerName);
            }
        }
    }
    return { [brand_privateNullableHeaders]: true, values: targetHeaders, nulls: nullHeaders };
};
|
||||
export const isEmptyHeaders = (headers) => {
    // Drawing a single entry is enough to prove non-emptiness.
    const first = iterateHeaders(headers).next();
    return Boolean(first.done);
};
|
||||
//# sourceMappingURL=headers.mjs.map
|
||||
56
extracted-source/node_modules/@anthropic-ai/sdk/internal/parse.mjs
generated
vendored
Normal file
56
extracted-source/node_modules/@anthropic-ai/sdk/internal/parse.mjs
generated
vendored
Normal file
@@ -0,0 +1,56 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { Stream } from "../core/streaming.mjs";
|
||||
import { formatRequestDetails, loggerFor } from "./utils/log.mjs";
|
||||
/**
 * Turn a raw fetch `Response` into the value handed back to SDK callers: a
 * Stream for `stream: true` requests, parsed JSON (tagged with the request
 * id) for JSON bodies, `null` for 204s, the raw Response for binary mode,
 * and plain text otherwise. Logs the parsed result either way.
 */
export async function defaultParseResponse(client, props) {
    const { response, requestLogID, retryOfRequestLogID, startTime } = props;
    const body = await (async () => {
        if (props.options.stream) {
            loggerFor(client).debug('response', response.status, response.url, response.headers, response.body);
            // Note: there is an invariant here that isn't represented in the type system
            // that if you set `stream: true` the response type must also be `Stream<T>`
            if (props.options.__streamClass) {
                // Caller-supplied Stream subclass (e.g. for custom event shapes).
                return props.options.__streamClass.fromSSEResponse(response, props.controller);
            }
            return Stream.fromSSEResponse(response, props.controller);
        }
        // fetch refuses to read the body when the status code is 204.
        if (response.status === 204) {
            return null;
        }
        if (props.options.__binaryResponse) {
            return response;
        }
        const contentType = response.headers.get('content-type');
        // Strip any parameters (e.g. "; charset=utf-8") before matching.
        const mediaType = contentType?.split(';')[0]?.trim();
        // Accept plain JSON and structured suffixes like `application/foo+json`.
        const isJSON = mediaType?.includes('application/json') || mediaType?.endsWith('+json');
        if (isJSON) {
            const contentLength = response.headers.get('content-length');
            if (contentLength === '0') {
                // if there is no content we can't do anything
                return undefined;
            }
            const json = await response.json();
            return addRequestID(json, response);
        }
        const text = await response.text();
        return text;
    })();
    loggerFor(client).debug(`[${requestLogID}] response parsed`, formatRequestDetails({
        retryOfRequestLogID,
        url: response.url,
        status: response.status,
        body,
        durationMs: Date.now() - startTime,
    }));
    return body;
}
|
||||
export function addRequestID(value, response) {
    // Only plain objects can carry the hidden `_request_id` property; arrays
    // and primitives are returned untouched.
    const isPlainCarrier = Boolean(value) && typeof value === 'object' && !Array.isArray(value);
    if (!isPlainCarrier) {
        return value;
    }
    // Non-enumerable so the id doesn't leak into JSON.stringify/Object.keys.
    return Object.defineProperty(value, '_request_id', {
        value: response.headers.get('request-id'),
        enumerable: false,
    });
}
|
||||
//# sourceMappingURL=parse.mjs.map
|
||||
10
extracted-source/node_modules/@anthropic-ai/sdk/internal/request-options.mjs
generated
vendored
Normal file
10
extracted-source/node_modules/@anthropic-ai/sdk/internal/request-options.mjs
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
export const FallbackEncoder = ({ headers, body }) => {
    // Default request encoder: serialise the body as JSON and advertise it.
    const bodyHeaders = { 'content-type': 'application/json' };
    return { bodyHeaders, body: JSON.stringify(body) };
};
|
||||
//# sourceMappingURL=request-options.mjs.map
|
||||
85
extracted-source/node_modules/@anthropic-ai/sdk/internal/shims.mjs
generated
vendored
Normal file
85
extracted-source/node_modules/@anthropic-ai/sdk/internal/shims.mjs
generated
vendored
Normal file
@@ -0,0 +1,85 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
export function getDefaultFetch() {
    // Fail fast with an actionable message when no global fetch exists.
    if (typeof fetch === 'undefined') {
        throw new Error('`fetch` is not defined as a global; Either pass `fetch` to the client, `new Anthropic({ fetch })` or polyfill the global, `globalThis.fetch = fetch`');
    }
    return fetch;
}
|
||||
export function makeReadableStream(...args) {
    const ReadableStream = globalThis.ReadableStream;
    if (typeof ReadableStream !== 'undefined') {
        return new ReadableStream(...args);
    }
    // Note: All of the platforms / runtimes we officially support already define
    // `ReadableStream` as a global, so this should only ever be hit on unsupported runtimes.
    throw new Error('`ReadableStream` is not defined as a global; You will need to polyfill it, `globalThis.ReadableStream = ReadableStream`');
}
|
||||
export function ReadableStreamFrom(iterable) {
    // Accept both async and sync iterables by grabbing whichever iterator
    // protocol the value implements.
    const iter = Symbol.asyncIterator in iterable ? iterable[Symbol.asyncIterator]() : iterable[Symbol.iterator]();
    return makeReadableStream({
        start() { },
        async pull(controller) {
            const { done, value } = await iter.next();
            if (done) {
                controller.close();
                return;
            }
            controller.enqueue(value);
        },
        async cancel() {
            // Give the source iterator a chance to clean up.
            await iter.return?.();
        },
    });
}
|
||||
/**
 * Most browsers don't yet have async iterable support for ReadableStream,
 * and Node has a very different way of reading bytes from its "ReadableStream".
 *
 * This polyfill was pulled from https://github.com/MattiasBuelens/web-streams-polyfill/pull/122#issuecomment-1627354490
 */
export function ReadableStreamToAsyncIterable(stream) {
    // Already async-iterable (Node streams, modern web streams): use as-is.
    if (stream[Symbol.asyncIterator]) {
        return stream;
    }
    const reader = stream.getReader();
    const next = async () => {
        try {
            const result = await reader.read();
            if (result?.done) {
                reader.releaseLock(); // release lock when stream becomes closed
            }
            return result;
        }
        catch (e) {
            reader.releaseLock(); // release lock when stream becomes errored
            throw e;
        }
    };
    const finish = async () => {
        const cancelPromise = reader.cancel();
        reader.releaseLock();
        await cancelPromise;
        return { done: true, value: undefined };
    };
    return {
        next,
        return: finish,
        [Symbol.asyncIterator]() {
            return this;
        },
    };
}
|
||||
/**
 * Cancels a ReadableStream we don't need to consume.
 * See https://undici.nodejs.org/#/?id=garbage-collection
 */
export async function CancelReadableStream(stream) {
    // Non-objects (including null) carry nothing to cancel.
    if (stream === null || typeof stream !== 'object') {
        return;
    }
    // Prefer the async-iterator protocol's own teardown when available.
    if (stream[Symbol.asyncIterator]) {
        await stream[Symbol.asyncIterator]().return?.();
        return;
    }
    const reader = stream.getReader();
    const cancelPromise = reader.cancel();
    reader.releaseLock();
    await cancelPromise;
}
|
||||
//# sourceMappingURL=shims.mjs.map
|
||||
93
extracted-source/node_modules/@anthropic-ai/sdk/internal/to-file.mjs
generated
vendored
Normal file
93
extracted-source/node_modules/@anthropic-ai/sdk/internal/to-file.mjs
generated
vendored
Normal file
@@ -0,0 +1,93 @@
|
||||
import { getName, makeFile, isAsyncIterable } from "./uploads.mjs";
|
||||
import { checkFileSupport } from "./uploads.mjs";
|
||||
/**
 * This check adds the arrayBuffer() method type because it is available and used at runtime
 */
const isBlobLike = (value) => {
    if (value == null || typeof value !== 'object') {
        return false;
    }
    // Duck-type the Blob surface the SDK actually relies on.
    return (typeof value.size === 'number' &&
        typeof value.type === 'string' &&
        typeof value.text === 'function' &&
        typeof value.slice === 'function' &&
        typeof value.arrayBuffer === 'function');
};
|
||||
/**
 * This check adds the arrayBuffer() method type because it is available and used at runtime
 */
const isFileLike = (value) => {
    if (value == null || typeof value !== 'object') {
        return false;
    }
    // A File is a Blob with a string name and a numeric lastModified stamp.
    return typeof value.name === 'string' && typeof value.lastModified === 'number' && isBlobLike(value);
};
|
||||
const isResponseLike = (value) => value != null &&
|
||||
typeof value === 'object' &&
|
||||
typeof value.url === 'string' &&
|
||||
typeof value.blob === 'function';
|
||||
/**
 * Helper for creating a {@link File} to pass to an SDK upload method from a variety of different data formats
 * @param value the raw content of the file. Can be an {@link Uploadable}, BlobLikePart, or AsyncIterable of BlobLikeParts
 * @param {string=} name the name of the file. If omitted, toFile will try to determine a file name from bits if possible
 * @param {Object=} options additional properties
 * @param {string=} options.type the MIME type of the content
 * @param {number=} options.lastModified the last modified timestamp
 * @returns a {@link File} with the given properties
 */
export async function toFile(value, name, options) {
    checkFileSupport();
    // If it's a promise, resolve it.
    value = await value;
    name || (name = getName(value, true));
    // If we've been given a `File` we don't need to do anything if the name / options
    // have not been customised.
    if (isFileLike(value)) {
        if (value instanceof File && name == null && options == null) {
            return value;
        }
        return makeFile([await value.arrayBuffer()], name ?? value.name, {
            type: value.type,
            lastModified: value.lastModified,
            ...options,
        });
    }
    if (isResponseLike(value)) {
        const blob = await value.blob();
        // Derive a file name from the final path segment of the URL.
        name || (name = new URL(value.url).pathname.split(/[\\/]/).pop());
        return makeFile(await getBytes(blob), name, options);
    }
    const parts = await getBytes(value);
    if (!options?.type) {
        // Infer the MIME type from the first Blob-like part carrying one.
        // BUGFIX: the previous code checked `typeof type === 'string'` on the
        // *part* returned by find() (always an object), so the inferred type
        // was silently dropped; read the part's `.type` string instead.
        const typedPart = parts.find((part) => typeof part === 'object' && 'type' in part && part.type);
        if (typedPart && typeof typedPart.type === 'string') {
            options = { ...options, type: typedPart.type };
        }
    }
    return makeFile(parts, name, options);
}
|
||||
async function getBytes(value) {
    // Normalise `value` into an array of BlobParts (strings, buffers, Blobs).
    if (typeof value === 'string' ||
        ArrayBuffer.isView(value) || // includes Uint8Array, Buffer, etc.
        value instanceof ArrayBuffer) {
        return [value];
    }
    if (isBlobLike(value)) {
        return [value instanceof Blob ? value : await value.arrayBuffer()];
    }
    if (isAsyncIterable(value) // includes Readable, ReadableStream, etc.
    ) {
        const parts = [];
        for await (const chunk of value) {
            parts.push(...(await getBytes(chunk))); // TODO, consider validating?
        }
        return parts;
    }
    const constructor = value?.constructor?.name;
    throw new Error(`Unexpected data type: ${typeof value}${constructor ? `; constructor: ${constructor}` : ''}${propsForError(value)}`);
}
|
||||
function propsForError(value) {
    // Summarise an unexpected value's own property names for error messages.
    if (value === null || typeof value !== 'object') {
        return '';
    }
    const names = Object.getOwnPropertyNames(value);
    return `; props: [${names.map((p) => `"${p}"`).join(', ')}]`;
}
|
||||
//# sourceMappingURL=to-file.mjs.map
|
||||
17
extracted-source/node_modules/@anthropic-ai/sdk/internal/tslib.mjs
generated
vendored
Normal file
17
extracted-source/node_modules/@anthropic-ai/sdk/internal/tslib.mjs
generated
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
function __classPrivateFieldSet(receiver, state, value, kind, f) {
    // tslib helper: write a down-leveled private member ("m" = method,
    // "a" = accessor, otherwise field) after validating ownership.
    if (kind === "m")
        throw new TypeError("Private method is not writable");
    if (kind === "a" && !f)
        throw new TypeError("Private accessor was defined without a setter");
    const owned = typeof state === "function" ? receiver === state && Boolean(f) : state.has(receiver);
    if (!owned)
        throw new TypeError("Cannot write private member to an object whose class did not declare it");
    if (kind === "a") {
        f.call(receiver, value);
    }
    else if (f) {
        f.value = value;
    }
    else {
        state.set(receiver, value);
    }
    return value;
}
|
||||
function __classPrivateFieldGet(receiver, state, kind, f) {
|
||||
if (kind === "a" && !f)
|
||||
throw new TypeError("Private accessor was defined without a getter");
|
||||
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver))
|
||||
throw new TypeError("Cannot read private member from an object whose class did not declare it");
|
||||
return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
|
||||
}
|
||||
export { __classPrivateFieldSet, __classPrivateFieldGet };
|
||||
135
extracted-source/node_modules/@anthropic-ai/sdk/internal/uploads.mjs
generated
vendored
Normal file
135
extracted-source/node_modules/@anthropic-ai/sdk/internal/uploads.mjs
generated
vendored
Normal file
@@ -0,0 +1,135 @@
|
||||
import { ReadableStreamFrom } from "./shims.mjs";
|
||||
export const checkFileSupport = () => {
|
||||
if (typeof File === 'undefined') {
|
||||
const { process } = globalThis;
|
||||
const isOldNode = typeof process?.versions?.node === 'string' && parseInt(process.versions.node.split('.')) < 20;
|
||||
throw new Error('`File` is not defined as a global, which is required for file uploads.' +
|
||||
(isOldNode ?
|
||||
" Update to Node 20 LTS or newer, or set `globalThis.File` to `import('node:buffer').File`."
|
||||
: ''));
|
||||
}
|
||||
};
|
||||
/**
|
||||
* Construct a `File` instance. This is used to ensure a helpful error is thrown
|
||||
* for environments that don't define a global `File` yet.
|
||||
*/
|
||||
export function makeFile(fileBits, fileName, options) {
|
||||
checkFileSupport();
|
||||
return new File(fileBits, fileName ?? 'unknown_file', options);
|
||||
}
|
||||
export function getName(value, stripPath) {
|
||||
const val = (typeof value === 'object' &&
|
||||
value !== null &&
|
||||
(('name' in value && value.name && String(value.name)) ||
|
||||
('url' in value && value.url && String(value.url)) ||
|
||||
('filename' in value && value.filename && String(value.filename)) ||
|
||||
('path' in value && value.path && String(value.path)))) ||
|
||||
'';
|
||||
return stripPath ? val.split(/[\\/]/).pop() || undefined : val;
|
||||
}
|
||||
export const isAsyncIterable = (value) => value != null && typeof value === 'object' && typeof value[Symbol.asyncIterator] === 'function';
|
||||
/**
|
||||
* Returns a multipart/form-data request if any part of the given request body contains a File / Blob value.
|
||||
* Otherwise returns the request as is.
|
||||
*/
|
||||
export const maybeMultipartFormRequestOptions = async (opts, fetch) => {
|
||||
if (!hasUploadableValue(opts.body))
|
||||
return opts;
|
||||
return { ...opts, body: await createForm(opts.body, fetch) };
|
||||
};
|
||||
export const multipartFormRequestOptions = async (opts, fetch, stripFilenames = true) => {
|
||||
return { ...opts, body: await createForm(opts.body, fetch, stripFilenames) };
|
||||
};
|
||||
const supportsFormDataMap = /* @__PURE__ */ new WeakMap();
|
||||
/**
|
||||
* node-fetch doesn't support the global FormData object in recent node versions. Instead of sending
|
||||
* properly-encoded form data, it just stringifies the object, resulting in a request body of "[object FormData]".
|
||||
* This function detects if the fetch function provided supports the global FormData object to avoid
|
||||
* confusing error messages later on.
|
||||
*/
|
||||
function supportsFormData(fetchObject) {
|
||||
const fetch = typeof fetchObject === 'function' ? fetchObject : fetchObject.fetch;
|
||||
const cached = supportsFormDataMap.get(fetch);
|
||||
if (cached)
|
||||
return cached;
|
||||
const promise = (async () => {
|
||||
try {
|
||||
const FetchResponse = ('Response' in fetch ?
|
||||
fetch.Response
|
||||
: (await fetch('data:,')).constructor);
|
||||
const data = new FormData();
|
||||
if (data.toString() === (await new FetchResponse(data).text())) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
catch {
|
||||
// avoid false negatives
|
||||
return true;
|
||||
}
|
||||
})();
|
||||
supportsFormDataMap.set(fetch, promise);
|
||||
return promise;
|
||||
}
|
||||
export const createForm = async (body, fetch, stripFilenames = true) => {
|
||||
if (!(await supportsFormData(fetch))) {
|
||||
throw new TypeError('The provided fetch function does not support file uploads with the current global FormData class.');
|
||||
}
|
||||
const form = new FormData();
|
||||
await Promise.all(Object.entries(body || {}).map(([key, value]) => addFormValue(form, key, value, stripFilenames)));
|
||||
return form;
|
||||
};
|
||||
// We check for Blob not File because Bun.File doesn't inherit from File,
|
||||
// but they both inherit from Blob and have a `name` property at runtime.
|
||||
const isNamedBlob = (value) => value instanceof Blob && 'name' in value;
|
||||
const isUploadable = (value) => typeof value === 'object' &&
|
||||
value !== null &&
|
||||
(value instanceof Response || isAsyncIterable(value) || isNamedBlob(value));
|
||||
const hasUploadableValue = (value) => {
|
||||
if (isUploadable(value))
|
||||
return true;
|
||||
if (Array.isArray(value))
|
||||
return value.some(hasUploadableValue);
|
||||
if (value && typeof value === 'object') {
|
||||
for (const k in value) {
|
||||
if (hasUploadableValue(value[k]))
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
const addFormValue = async (form, key, value, stripFilenames) => {
|
||||
if (value === undefined)
|
||||
return;
|
||||
if (value == null) {
|
||||
throw new TypeError(`Received null for "${key}"; to pass null in FormData, you must use the string 'null'`);
|
||||
}
|
||||
// TODO: make nested formats configurable
|
||||
if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') {
|
||||
form.append(key, String(value));
|
||||
}
|
||||
else if (value instanceof Response) {
|
||||
let options = {};
|
||||
const contentType = value.headers.get('Content-Type');
|
||||
if (contentType) {
|
||||
options = { type: contentType };
|
||||
}
|
||||
form.append(key, makeFile([await value.blob()], getName(value, stripFilenames), options));
|
||||
}
|
||||
else if (isAsyncIterable(value)) {
|
||||
form.append(key, makeFile([await new Response(ReadableStreamFrom(value)).blob()], getName(value, stripFilenames)));
|
||||
}
|
||||
else if (isNamedBlob(value)) {
|
||||
form.append(key, makeFile([value], getName(value, stripFilenames), { type: value.type }));
|
||||
}
|
||||
else if (Array.isArray(value)) {
|
||||
await Promise.all(value.map((entry) => addFormValue(form, key + '[]', entry, stripFilenames)));
|
||||
}
|
||||
else if (typeof value === 'object') {
|
||||
await Promise.all(Object.entries(value).map(([name, prop]) => addFormValue(form, `${key}[${name}]`, prop, stripFilenames)));
|
||||
}
|
||||
else {
|
||||
throw new TypeError(`Invalid value given to form, expected a string, number, boolean, object, Array, File or Blob but got ${value} instead`);
|
||||
}
|
||||
};
|
||||
//# sourceMappingURL=uploads.mjs.map
|
||||
26
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/bytes.mjs
generated
vendored
Normal file
26
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/bytes.mjs
generated
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
export function concatBytes(buffers) {
|
||||
let length = 0;
|
||||
for (const buffer of buffers) {
|
||||
length += buffer.length;
|
||||
}
|
||||
const output = new Uint8Array(length);
|
||||
let index = 0;
|
||||
for (const buffer of buffers) {
|
||||
output.set(buffer, index);
|
||||
index += buffer.length;
|
||||
}
|
||||
return output;
|
||||
}
|
||||
let encodeUTF8_;
|
||||
export function encodeUTF8(str) {
|
||||
let encoder;
|
||||
return (encodeUTF8_ ??
|
||||
((encoder = new globalThis.TextEncoder()), (encodeUTF8_ = encoder.encode.bind(encoder))))(str);
|
||||
}
|
||||
let decodeUTF8_;
|
||||
export function decodeUTF8(bytes) {
|
||||
let decoder;
|
||||
return (decodeUTF8_ ??
|
||||
((decoder = new globalThis.TextDecoder()), (decodeUTF8_ = decoder.decode.bind(decoder))))(bytes);
|
||||
}
|
||||
//# sourceMappingURL=bytes.mjs.map
|
||||
18
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/env.mjs
generated
vendored
Normal file
18
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/env.mjs
generated
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
/**
|
||||
* Read an environment variable.
|
||||
*
|
||||
* Trims beginning and trailing whitespace.
|
||||
*
|
||||
* Will return undefined if the environment variable doesn't exist or cannot be accessed.
|
||||
*/
|
||||
export const readEnv = (env) => {
|
||||
if (typeof globalThis.process !== 'undefined') {
|
||||
return globalThis.process.env?.[env]?.trim() ?? undefined;
|
||||
}
|
||||
if (typeof globalThis.Deno !== 'undefined') {
|
||||
return globalThis.Deno.env?.get?.(env)?.trim();
|
||||
}
|
||||
return undefined;
|
||||
};
|
||||
//# sourceMappingURL=env.mjs.map
|
||||
80
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/log.mjs
generated
vendored
Normal file
80
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/log.mjs
generated
vendored
Normal file
@@ -0,0 +1,80 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { hasOwn } from "./values.mjs";
|
||||
const levelNumbers = {
|
||||
off: 0,
|
||||
error: 200,
|
||||
warn: 300,
|
||||
info: 400,
|
||||
debug: 500,
|
||||
};
|
||||
export const parseLogLevel = (maybeLevel, sourceName, client) => {
|
||||
if (!maybeLevel) {
|
||||
return undefined;
|
||||
}
|
||||
if (hasOwn(levelNumbers, maybeLevel)) {
|
||||
return maybeLevel;
|
||||
}
|
||||
loggerFor(client).warn(`${sourceName} was set to ${JSON.stringify(maybeLevel)}, expected one of ${JSON.stringify(Object.keys(levelNumbers))}`);
|
||||
return undefined;
|
||||
};
|
||||
function noop() { }
|
||||
function makeLogFn(fnLevel, logger, logLevel) {
|
||||
if (!logger || levelNumbers[fnLevel] > levelNumbers[logLevel]) {
|
||||
return noop;
|
||||
}
|
||||
else {
|
||||
// Don't wrap logger functions, we want the stacktrace intact!
|
||||
return logger[fnLevel].bind(logger);
|
||||
}
|
||||
}
|
||||
const noopLogger = {
|
||||
error: noop,
|
||||
warn: noop,
|
||||
info: noop,
|
||||
debug: noop,
|
||||
};
|
||||
let cachedLoggers = /* @__PURE__ */ new WeakMap();
|
||||
export function loggerFor(client) {
|
||||
const logger = client.logger;
|
||||
const logLevel = client.logLevel ?? 'off';
|
||||
if (!logger) {
|
||||
return noopLogger;
|
||||
}
|
||||
const cachedLogger = cachedLoggers.get(logger);
|
||||
if (cachedLogger && cachedLogger[0] === logLevel) {
|
||||
return cachedLogger[1];
|
||||
}
|
||||
const levelLogger = {
|
||||
error: makeLogFn('error', logger, logLevel),
|
||||
warn: makeLogFn('warn', logger, logLevel),
|
||||
info: makeLogFn('info', logger, logLevel),
|
||||
debug: makeLogFn('debug', logger, logLevel),
|
||||
};
|
||||
cachedLoggers.set(logger, [logLevel, levelLogger]);
|
||||
return levelLogger;
|
||||
}
|
||||
export const formatRequestDetails = (details) => {
|
||||
if (details.options) {
|
||||
details.options = { ...details.options };
|
||||
delete details.options['headers']; // redundant + leaks internals
|
||||
}
|
||||
if (details.headers) {
|
||||
details.headers = Object.fromEntries((details.headers instanceof Headers ? [...details.headers] : Object.entries(details.headers)).map(([name, value]) => [
|
||||
name,
|
||||
(name.toLowerCase() === 'x-api-key' ||
|
||||
name.toLowerCase() === 'authorization' ||
|
||||
name.toLowerCase() === 'cookie' ||
|
||||
name.toLowerCase() === 'set-cookie') ?
|
||||
'***'
|
||||
: value,
|
||||
]));
|
||||
}
|
||||
if ('retryOfRequestLogID' in details) {
|
||||
if (details.retryOfRequestLogID) {
|
||||
details.retryOf = details.retryOfRequestLogID;
|
||||
}
|
||||
delete details.retryOfRequestLogID;
|
||||
}
|
||||
return details;
|
||||
};
|
||||
//# sourceMappingURL=log.mjs.map
|
||||
74
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/path.mjs
generated
vendored
Normal file
74
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/path.mjs
generated
vendored
Normal file
@@ -0,0 +1,74 @@
|
||||
import { AnthropicError } from "../../core/error.mjs";
|
||||
/**
|
||||
* Percent-encode everything that isn't safe to have in a path without encoding safe chars.
|
||||
*
|
||||
* Taken from https://datatracker.ietf.org/doc/html/rfc3986#section-3.3:
|
||||
* > unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
|
||||
* > sub-delims = "!" / "$" / "&" / "'" / "(" / ")" / "*" / "+" / "," / ";" / "="
|
||||
* > pchar = unreserved / pct-encoded / sub-delims / ":" / "@"
|
||||
*/
|
||||
export function encodeURIPath(str) {
|
||||
return str.replace(/[^A-Za-z0-9\-._~!$&'()*+,;=:@]+/g, encodeURIComponent);
|
||||
}
|
||||
const EMPTY = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.create(null));
|
||||
export const createPathTagFunction = (pathEncoder = encodeURIPath) => function path(statics, ...params) {
|
||||
// If there are no params, no processing is needed.
|
||||
if (statics.length === 1)
|
||||
return statics[0];
|
||||
let postPath = false;
|
||||
const invalidSegments = [];
|
||||
const path = statics.reduce((previousValue, currentValue, index) => {
|
||||
if (/[?#]/.test(currentValue)) {
|
||||
postPath = true;
|
||||
}
|
||||
const value = params[index];
|
||||
let encoded = (postPath ? encodeURIComponent : pathEncoder)('' + value);
|
||||
if (index !== params.length &&
|
||||
(value == null ||
|
||||
(typeof value === 'object' &&
|
||||
// handle values from other realms
|
||||
value.toString ===
|
||||
Object.getPrototypeOf(Object.getPrototypeOf(value.hasOwnProperty ?? EMPTY) ?? EMPTY)
|
||||
?.toString))) {
|
||||
encoded = value + '';
|
||||
invalidSegments.push({
|
||||
start: previousValue.length + currentValue.length,
|
||||
length: encoded.length,
|
||||
error: `Value of type ${Object.prototype.toString
|
||||
.call(value)
|
||||
.slice(8, -1)} is not a valid path parameter`,
|
||||
});
|
||||
}
|
||||
return previousValue + currentValue + (index === params.length ? '' : encoded);
|
||||
}, '');
|
||||
const pathOnly = path.split(/[?#]/, 1)[0];
|
||||
const invalidSegmentPattern = /(?<=^|\/)(?:\.|%2e){1,2}(?=\/|$)/gi;
|
||||
let match;
|
||||
// Find all invalid segments
|
||||
while ((match = invalidSegmentPattern.exec(pathOnly)) !== null) {
|
||||
invalidSegments.push({
|
||||
start: match.index,
|
||||
length: match[0].length,
|
||||
error: `Value "${match[0]}" can\'t be safely passed as a path parameter`,
|
||||
});
|
||||
}
|
||||
invalidSegments.sort((a, b) => a.start - b.start);
|
||||
if (invalidSegments.length > 0) {
|
||||
let lastEnd = 0;
|
||||
const underline = invalidSegments.reduce((acc, segment) => {
|
||||
const spaces = ' '.repeat(segment.start - lastEnd);
|
||||
const arrows = '^'.repeat(segment.length);
|
||||
lastEnd = segment.start + segment.length;
|
||||
return acc + spaces + arrows;
|
||||
}, '');
|
||||
throw new AnthropicError(`Path parameters result in path with invalid segments:\n${invalidSegments
|
||||
.map((e) => e.error)
|
||||
.join('\n')}\n${path}\n${underline}`);
|
||||
}
|
||||
return path;
|
||||
};
|
||||
/**
|
||||
* URI-encodes path params and ensures no unsafe /./ or /../ path segments are introduced.
|
||||
*/
|
||||
export const path = /* @__PURE__ */ createPathTagFunction(encodeURIPath);
|
||||
//# sourceMappingURL=path.mjs.map
|
||||
3
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/sleep.mjs
generated
vendored
Normal file
3
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/sleep.mjs
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
export const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
|
||||
//# sourceMappingURL=sleep.mjs.map
|
||||
15
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/uuid.mjs
generated
vendored
Normal file
15
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/uuid.mjs
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
/**
|
||||
* https://stackoverflow.com/a/2117523
|
||||
*/
|
||||
export let uuid4 = function () {
|
||||
const { crypto } = globalThis;
|
||||
if (crypto?.randomUUID) {
|
||||
uuid4 = crypto.randomUUID.bind(crypto);
|
||||
return crypto.randomUUID();
|
||||
}
|
||||
const u8 = new Uint8Array(1);
|
||||
const randomByte = crypto ? () => crypto.getRandomValues(u8)[0] : () => (Math.random() * 0xff) & 0xff;
|
||||
return '10000000-1000-4000-8000-100000000000'.replace(/[018]/g, (c) => (+c ^ (randomByte() & (15 >> (+c / 4)))).toString(16));
|
||||
};
|
||||
//# sourceMappingURL=uuid.mjs.map
|
||||
100
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/values.mjs
generated
vendored
Normal file
100
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/values.mjs
generated
vendored
Normal file
@@ -0,0 +1,100 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { AnthropicError } from "../../core/error.mjs";
|
||||
// https://url.spec.whatwg.org/#url-scheme-string
|
||||
const startsWithSchemeRegexp = /^[a-z][a-z0-9+.-]*:/i;
|
||||
export const isAbsoluteURL = (url) => {
|
||||
return startsWithSchemeRegexp.test(url);
|
||||
};
|
||||
export let isArray = (val) => ((isArray = Array.isArray), isArray(val));
|
||||
export let isReadonlyArray = isArray;
|
||||
/** Returns an object if the given value isn't an object, otherwise returns as-is */
|
||||
export function maybeObj(x) {
|
||||
if (typeof x !== 'object') {
|
||||
return {};
|
||||
}
|
||||
return x ?? {};
|
||||
}
|
||||
// https://stackoverflow.com/a/34491287
|
||||
export function isEmptyObj(obj) {
|
||||
if (!obj)
|
||||
return true;
|
||||
for (const _k in obj)
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
// https://eslint.org/docs/latest/rules/no-prototype-builtins
|
||||
export function hasOwn(obj, key) {
|
||||
return Object.prototype.hasOwnProperty.call(obj, key);
|
||||
}
|
||||
export function isObj(obj) {
|
||||
return obj != null && typeof obj === 'object' && !Array.isArray(obj);
|
||||
}
|
||||
export const ensurePresent = (value) => {
|
||||
if (value == null) {
|
||||
throw new AnthropicError(`Expected a value to be given but received ${value} instead.`);
|
||||
}
|
||||
return value;
|
||||
};
|
||||
export const validatePositiveInteger = (name, n) => {
|
||||
if (typeof n !== 'number' || !Number.isInteger(n)) {
|
||||
throw new AnthropicError(`${name} must be an integer`);
|
||||
}
|
||||
if (n < 0) {
|
||||
throw new AnthropicError(`${name} must be a positive integer`);
|
||||
}
|
||||
return n;
|
||||
};
|
||||
export const coerceInteger = (value) => {
|
||||
if (typeof value === 'number')
|
||||
return Math.round(value);
|
||||
if (typeof value === 'string')
|
||||
return parseInt(value, 10);
|
||||
throw new AnthropicError(`Could not coerce ${value} (type: ${typeof value}) into a number`);
|
||||
};
|
||||
export const coerceFloat = (value) => {
|
||||
if (typeof value === 'number')
|
||||
return value;
|
||||
if (typeof value === 'string')
|
||||
return parseFloat(value);
|
||||
throw new AnthropicError(`Could not coerce ${value} (type: ${typeof value}) into a number`);
|
||||
};
|
||||
export const coerceBoolean = (value) => {
|
||||
if (typeof value === 'boolean')
|
||||
return value;
|
||||
if (typeof value === 'string')
|
||||
return value === 'true';
|
||||
return Boolean(value);
|
||||
};
|
||||
export const maybeCoerceInteger = (value) => {
|
||||
if (value == null) {
|
||||
return undefined;
|
||||
}
|
||||
return coerceInteger(value);
|
||||
};
|
||||
export const maybeCoerceFloat = (value) => {
|
||||
if (value == null) {
|
||||
return undefined;
|
||||
}
|
||||
return coerceFloat(value);
|
||||
};
|
||||
export const maybeCoerceBoolean = (value) => {
|
||||
if (value == null) {
|
||||
return undefined;
|
||||
}
|
||||
return coerceBoolean(value);
|
||||
};
|
||||
export const safeJSON = (text) => {
|
||||
try {
|
||||
return JSON.parse(text);
|
||||
}
|
||||
catch (err) {
|
||||
return undefined;
|
||||
}
|
||||
};
|
||||
// Gets a value from an object, deletes the key, and returns the value (or undefined if not found)
|
||||
export const pop = (obj, key) => {
|
||||
const value = obj[key];
|
||||
delete obj[key];
|
||||
return value;
|
||||
};
|
||||
//# sourceMappingURL=values.mjs.map
|
||||
616
extracted-source/node_modules/@anthropic-ai/sdk/lib/BetaMessageStream.mjs
generated
vendored
Normal file
616
extracted-source/node_modules/@anthropic-ai/sdk/lib/BetaMessageStream.mjs
generated
vendored
Normal file
@@ -0,0 +1,616 @@
|
||||
var _BetaMessageStream_instances, _BetaMessageStream_currentMessageSnapshot, _BetaMessageStream_params, _BetaMessageStream_connectedPromise, _BetaMessageStream_resolveConnectedPromise, _BetaMessageStream_rejectConnectedPromise, _BetaMessageStream_endPromise, _BetaMessageStream_resolveEndPromise, _BetaMessageStream_rejectEndPromise, _BetaMessageStream_listeners, _BetaMessageStream_ended, _BetaMessageStream_errored, _BetaMessageStream_aborted, _BetaMessageStream_catchingPromiseCreated, _BetaMessageStream_response, _BetaMessageStream_request_id, _BetaMessageStream_logger, _BetaMessageStream_getFinalMessage, _BetaMessageStream_getFinalText, _BetaMessageStream_handleError, _BetaMessageStream_beginRequest, _BetaMessageStream_addStreamEvent, _BetaMessageStream_endRequest, _BetaMessageStream_accumulateMessage;
|
||||
import { __classPrivateFieldGet, __classPrivateFieldSet } from "../internal/tslib.mjs";
|
||||
import { partialParse } from "../_vendor/partial-json-parser/parser.mjs";
|
||||
import { AnthropicError, APIUserAbortError } from "../error.mjs";
|
||||
import { isAbortError } from "../internal/errors.mjs";
|
||||
import { Stream } from "../streaming.mjs";
|
||||
import { maybeParseBetaMessage } from "./beta-parser.mjs";
|
||||
const JSON_BUF_PROPERTY = '__json_buf';
|
||||
function tracksToolInput(content) {
|
||||
return content.type === 'tool_use' || content.type === 'server_tool_use' || content.type === 'mcp_tool_use';
|
||||
}
|
||||
export class BetaMessageStream {
|
||||
constructor(params, opts) {
|
||||
_BetaMessageStream_instances.add(this);
|
||||
this.messages = [];
|
||||
this.receivedMessages = [];
|
||||
_BetaMessageStream_currentMessageSnapshot.set(this, void 0);
|
||||
_BetaMessageStream_params.set(this, null);
|
||||
this.controller = new AbortController();
|
||||
_BetaMessageStream_connectedPromise.set(this, void 0);
|
||||
_BetaMessageStream_resolveConnectedPromise.set(this, () => { });
|
||||
_BetaMessageStream_rejectConnectedPromise.set(this, () => { });
|
||||
_BetaMessageStream_endPromise.set(this, void 0);
|
||||
_BetaMessageStream_resolveEndPromise.set(this, () => { });
|
||||
_BetaMessageStream_rejectEndPromise.set(this, () => { });
|
||||
_BetaMessageStream_listeners.set(this, {});
|
||||
_BetaMessageStream_ended.set(this, false);
|
||||
_BetaMessageStream_errored.set(this, false);
|
||||
_BetaMessageStream_aborted.set(this, false);
|
||||
_BetaMessageStream_catchingPromiseCreated.set(this, false);
|
||||
_BetaMessageStream_response.set(this, void 0);
|
||||
_BetaMessageStream_request_id.set(this, void 0);
|
||||
_BetaMessageStream_logger.set(this, void 0);
|
||||
_BetaMessageStream_handleError.set(this, (error) => {
|
||||
__classPrivateFieldSet(this, _BetaMessageStream_errored, true, "f");
|
||||
if (isAbortError(error)) {
|
||||
error = new APIUserAbortError();
|
||||
}
|
||||
if (error instanceof APIUserAbortError) {
|
||||
__classPrivateFieldSet(this, _BetaMessageStream_aborted, true, "f");
|
||||
return this._emit('abort', error);
|
||||
}
|
||||
if (error instanceof AnthropicError) {
|
||||
return this._emit('error', error);
|
||||
}
|
||||
if (error instanceof Error) {
|
||||
const anthropicError = new AnthropicError(error.message);
|
||||
// @ts-ignore
|
||||
anthropicError.cause = error;
|
||||
return this._emit('error', anthropicError);
|
||||
}
|
||||
return this._emit('error', new AnthropicError(String(error)));
|
||||
});
|
||||
__classPrivateFieldSet(this, _BetaMessageStream_connectedPromise, new Promise((resolve, reject) => {
|
||||
__classPrivateFieldSet(this, _BetaMessageStream_resolveConnectedPromise, resolve, "f");
|
||||
__classPrivateFieldSet(this, _BetaMessageStream_rejectConnectedPromise, reject, "f");
|
||||
}), "f");
|
||||
__classPrivateFieldSet(this, _BetaMessageStream_endPromise, new Promise((resolve, reject) => {
|
||||
__classPrivateFieldSet(this, _BetaMessageStream_resolveEndPromise, resolve, "f");
|
||||
__classPrivateFieldSet(this, _BetaMessageStream_rejectEndPromise, reject, "f");
|
||||
}), "f");
|
||||
// Don't let these promises cause unhandled rejection errors.
|
||||
// we will manually cause an unhandled rejection error later
|
||||
// if the user hasn't registered any error listener or called
|
||||
// any promise-returning method.
|
||||
__classPrivateFieldGet(this, _BetaMessageStream_connectedPromise, "f").catch(() => { });
|
||||
__classPrivateFieldGet(this, _BetaMessageStream_endPromise, "f").catch(() => { });
|
||||
__classPrivateFieldSet(this, _BetaMessageStream_params, params, "f");
|
||||
__classPrivateFieldSet(this, _BetaMessageStream_logger, opts?.logger ?? console, "f");
|
||||
}
|
||||
get response() {
|
||||
return __classPrivateFieldGet(this, _BetaMessageStream_response, "f");
|
||||
}
|
||||
get request_id() {
|
||||
return __classPrivateFieldGet(this, _BetaMessageStream_request_id, "f");
|
||||
}
|
||||
/**
|
||||
* Returns the `MessageStream` data, the raw `Response` instance and the ID of the request,
|
||||
* returned vie the `request-id` header which is useful for debugging requests and resporting
|
||||
* issues to Anthropic.
|
||||
*
|
||||
* This is the same as the `APIPromise.withResponse()` method.
|
||||
*
|
||||
* This method will raise an error if you created the stream using `MessageStream.fromReadableStream`
|
||||
* as no `Response` is available.
|
||||
*/
|
||||
async withResponse() {
|
||||
__classPrivateFieldSet(this, _BetaMessageStream_catchingPromiseCreated, true, "f");
|
||||
const response = await __classPrivateFieldGet(this, _BetaMessageStream_connectedPromise, "f");
|
||||
if (!response) {
|
||||
throw new Error('Could not resolve a `Response` object');
|
||||
}
|
||||
return {
|
||||
data: this,
|
||||
response,
|
||||
request_id: response.headers.get('request-id'),
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Intended for use on the frontend, consuming a stream produced with
|
||||
* `.toReadableStream()` on the backend.
|
||||
*
|
||||
* Note that messages sent to the model do not appear in `.on('message')`
|
||||
* in this context.
|
||||
*/
|
||||
static fromReadableStream(stream) {
|
||||
const runner = new BetaMessageStream(null);
|
||||
runner._run(() => runner._fromReadableStream(stream));
|
||||
return runner;
|
||||
}
|
||||
static createMessage(messages, params, options, { logger } = {}) {
|
||||
const runner = new BetaMessageStream(params, { logger });
|
||||
for (const message of params.messages) {
|
||||
runner._addMessageParam(message);
|
||||
}
|
||||
__classPrivateFieldSet(runner, _BetaMessageStream_params, { ...params, stream: true }, "f");
|
||||
runner._run(() => runner._createMessage(messages, { ...params, stream: true }, { ...options, headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'stream' } }));
|
||||
return runner;
|
||||
}
|
||||
_run(executor) {
|
||||
executor().then(() => {
|
||||
this._emitFinal();
|
||||
this._emit('end');
|
||||
}, __classPrivateFieldGet(this, _BetaMessageStream_handleError, "f"));
|
||||
}
|
||||
_addMessageParam(message) {
|
||||
this.messages.push(message);
|
||||
}
|
||||
_addMessage(message, emit = true) {
|
||||
this.receivedMessages.push(message);
|
||||
if (emit) {
|
||||
this._emit('message', message);
|
||||
}
|
||||
}
|
||||
async _createMessage(messages, params, options) {
|
||||
const signal = options?.signal;
|
||||
let abortHandler;
|
||||
if (signal) {
|
||||
if (signal.aborted)
|
||||
this.controller.abort();
|
||||
abortHandler = this.controller.abort.bind(this.controller);
|
||||
signal.addEventListener('abort', abortHandler);
|
||||
}
|
||||
try {
|
||||
__classPrivateFieldGet(this, _BetaMessageStream_instances, "m", _BetaMessageStream_beginRequest).call(this);
|
||||
const { response, data: stream } = await messages
|
||||
.create({ ...params, stream: true }, { ...options, signal: this.controller.signal })
|
||||
.withResponse();
|
||||
this._connected(response);
|
||||
for await (const event of stream) {
|
||||
__classPrivateFieldGet(this, _BetaMessageStream_instances, "m", _BetaMessageStream_addStreamEvent).call(this, event);
|
||||
}
|
||||
if (stream.controller.signal?.aborted) {
|
||||
throw new APIUserAbortError();
|
||||
}
|
||||
__classPrivateFieldGet(this, _BetaMessageStream_instances, "m", _BetaMessageStream_endRequest).call(this);
|
||||
}
|
||||
finally {
|
||||
if (signal && abortHandler) {
|
||||
signal.removeEventListener('abort', abortHandler);
|
||||
}
|
||||
}
|
||||
}
|
||||
_connected(response) {
|
||||
if (this.ended)
|
||||
return;
|
||||
__classPrivateFieldSet(this, _BetaMessageStream_response, response, "f");
|
||||
__classPrivateFieldSet(this, _BetaMessageStream_request_id, response?.headers.get('request-id'), "f");
|
||||
__classPrivateFieldGet(this, _BetaMessageStream_resolveConnectedPromise, "f").call(this, response);
|
||||
this._emit('connect');
|
||||
}
|
||||
get ended() {
|
||||
return __classPrivateFieldGet(this, _BetaMessageStream_ended, "f");
|
||||
}
|
||||
get errored() {
|
||||
return __classPrivateFieldGet(this, _BetaMessageStream_errored, "f");
|
||||
}
|
||||
get aborted() {
|
||||
return __classPrivateFieldGet(this, _BetaMessageStream_aborted, "f");
|
||||
}
|
||||
abort() {
|
||||
this.controller.abort();
|
||||
}
|
||||
/**
|
||||
* Adds the listener function to the end of the listeners array for the event.
|
||||
* No checks are made to see if the listener has already been added. Multiple calls passing
|
||||
* the same combination of event and listener will result in the listener being added, and
|
||||
* called, multiple times.
|
||||
* @returns this MessageStream, so that calls can be chained
|
||||
*/
|
||||
on(event, listener) {
|
||||
const listeners = __classPrivateFieldGet(this, _BetaMessageStream_listeners, "f")[event] || (__classPrivateFieldGet(this, _BetaMessageStream_listeners, "f")[event] = []);
|
||||
listeners.push({ listener });
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* Removes the specified listener from the listener array for the event.
|
||||
* off() will remove, at most, one instance of a listener from the listener array. If any single
|
||||
* listener has been added multiple times to the listener array for the specified event, then
|
||||
* off() must be called multiple times to remove each instance.
|
||||
* @returns this MessageStream, so that calls can be chained
|
||||
*/
|
||||
off(event, listener) {
|
||||
const listeners = __classPrivateFieldGet(this, _BetaMessageStream_listeners, "f")[event];
|
||||
if (!listeners)
|
||||
return this;
|
||||
const index = listeners.findIndex((l) => l.listener === listener);
|
||||
if (index >= 0)
|
||||
listeners.splice(index, 1);
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* Adds a one-time listener function for the event. The next time the event is triggered,
|
||||
* this listener is removed and then invoked.
|
||||
* @returns this MessageStream, so that calls can be chained
|
||||
*/
|
||||
once(event, listener) {
|
||||
const listeners = __classPrivateFieldGet(this, _BetaMessageStream_listeners, "f")[event] || (__classPrivateFieldGet(this, _BetaMessageStream_listeners, "f")[event] = []);
|
||||
listeners.push({ listener, once: true });
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* This is similar to `.once()`, but returns a Promise that resolves the next time
|
||||
* the event is triggered, instead of calling a listener callback.
|
||||
* @returns a Promise that resolves the next time given event is triggered,
|
||||
* or rejects if an error is emitted. (If you request the 'error' event,
|
||||
* returns a promise that resolves with the error).
|
||||
*
|
||||
* Example:
|
||||
*
|
||||
* const message = await stream.emitted('message') // rejects if the stream errors
|
||||
*/
|
||||
emitted(event) {
|
||||
return new Promise((resolve, reject) => {
|
||||
__classPrivateFieldSet(this, _BetaMessageStream_catchingPromiseCreated, true, "f");
|
||||
if (event !== 'error')
|
||||
this.once('error', reject);
|
||||
this.once(event, resolve);
|
||||
});
|
||||
}
|
||||
async done() {
|
||||
__classPrivateFieldSet(this, _BetaMessageStream_catchingPromiseCreated, true, "f");
|
||||
await __classPrivateFieldGet(this, _BetaMessageStream_endPromise, "f");
|
||||
}
|
||||
get currentMessage() {
|
||||
return __classPrivateFieldGet(this, _BetaMessageStream_currentMessageSnapshot, "f");
|
||||
}
|
||||
/**
|
||||
* @returns a promise that resolves with the the final assistant Message response,
|
||||
* or rejects if an error occurred or the stream ended prematurely without producing a Message.
|
||||
* If structured outputs were used, this will be a ParsedMessage with a `parsed` field.
|
||||
*/
|
||||
async finalMessage() {
|
||||
await this.done();
|
||||
return __classPrivateFieldGet(this, _BetaMessageStream_instances, "m", _BetaMessageStream_getFinalMessage).call(this);
|
||||
}
|
||||
/**
|
||||
* @returns a promise that resolves with the the final assistant Message's text response, concatenated
|
||||
* together if there are more than one text blocks.
|
||||
* Rejects if an error occurred or the stream ended prematurely without producing a Message.
|
||||
*/
|
||||
async finalText() {
|
||||
await this.done();
|
||||
return __classPrivateFieldGet(this, _BetaMessageStream_instances, "m", _BetaMessageStream_getFinalText).call(this);
|
||||
}
|
||||
_emit(event, ...args) {
|
||||
// make sure we don't emit any MessageStreamEvents after end
|
||||
if (__classPrivateFieldGet(this, _BetaMessageStream_ended, "f"))
|
||||
return;
|
||||
if (event === 'end') {
|
||||
__classPrivateFieldSet(this, _BetaMessageStream_ended, true, "f");
|
||||
__classPrivateFieldGet(this, _BetaMessageStream_resolveEndPromise, "f").call(this);
|
||||
}
|
||||
const listeners = __classPrivateFieldGet(this, _BetaMessageStream_listeners, "f")[event];
|
||||
if (listeners) {
|
||||
__classPrivateFieldGet(this, _BetaMessageStream_listeners, "f")[event] = listeners.filter((l) => !l.once);
|
||||
listeners.forEach(({ listener }) => listener(...args));
|
||||
}
|
||||
if (event === 'abort') {
|
||||
const error = args[0];
|
||||
if (!__classPrivateFieldGet(this, _BetaMessageStream_catchingPromiseCreated, "f") && !listeners?.length) {
|
||||
Promise.reject(error);
|
||||
}
|
||||
__classPrivateFieldGet(this, _BetaMessageStream_rejectConnectedPromise, "f").call(this, error);
|
||||
__classPrivateFieldGet(this, _BetaMessageStream_rejectEndPromise, "f").call(this, error);
|
||||
this._emit('end');
|
||||
return;
|
||||
}
|
||||
if (event === 'error') {
|
||||
// NOTE: _emit('error', error) should only be called from #handleError().
|
||||
const error = args[0];
|
||||
if (!__classPrivateFieldGet(this, _BetaMessageStream_catchingPromiseCreated, "f") && !listeners?.length) {
|
||||
// Trigger an unhandled rejection if the user hasn't registered any error handlers.
|
||||
// If you are seeing stack traces here, make sure to handle errors via either:
|
||||
// - runner.on('error', () => ...)
|
||||
// - await runner.done()
|
||||
// - await runner.final...()
|
||||
// - etc.
|
||||
Promise.reject(error);
|
||||
}
|
||||
__classPrivateFieldGet(this, _BetaMessageStream_rejectConnectedPromise, "f").call(this, error);
|
||||
__classPrivateFieldGet(this, _BetaMessageStream_rejectEndPromise, "f").call(this, error);
|
||||
this._emit('end');
|
||||
}
|
||||
}
|
||||
_emitFinal() {
|
||||
const finalMessage = this.receivedMessages.at(-1);
|
||||
if (finalMessage) {
|
||||
this._emit('finalMessage', __classPrivateFieldGet(this, _BetaMessageStream_instances, "m", _BetaMessageStream_getFinalMessage).call(this));
|
||||
}
|
||||
}
|
||||
async _fromReadableStream(readableStream, options) {
|
||||
const signal = options?.signal;
|
||||
let abortHandler;
|
||||
if (signal) {
|
||||
if (signal.aborted)
|
||||
this.controller.abort();
|
||||
abortHandler = this.controller.abort.bind(this.controller);
|
||||
signal.addEventListener('abort', abortHandler);
|
||||
}
|
||||
try {
|
||||
__classPrivateFieldGet(this, _BetaMessageStream_instances, "m", _BetaMessageStream_beginRequest).call(this);
|
||||
this._connected(null);
|
||||
const stream = Stream.fromReadableStream(readableStream, this.controller);
|
||||
for await (const event of stream) {
|
||||
__classPrivateFieldGet(this, _BetaMessageStream_instances, "m", _BetaMessageStream_addStreamEvent).call(this, event);
|
||||
}
|
||||
if (stream.controller.signal?.aborted) {
|
||||
throw new APIUserAbortError();
|
||||
}
|
||||
__classPrivateFieldGet(this, _BetaMessageStream_instances, "m", _BetaMessageStream_endRequest).call(this);
|
||||
}
|
||||
finally {
|
||||
if (signal && abortHandler) {
|
||||
signal.removeEventListener('abort', abortHandler);
|
||||
}
|
||||
}
|
||||
}
|
||||
[(_BetaMessageStream_currentMessageSnapshot = new WeakMap(), _BetaMessageStream_params = new WeakMap(), _BetaMessageStream_connectedPromise = new WeakMap(), _BetaMessageStream_resolveConnectedPromise = new WeakMap(), _BetaMessageStream_rejectConnectedPromise = new WeakMap(), _BetaMessageStream_endPromise = new WeakMap(), _BetaMessageStream_resolveEndPromise = new WeakMap(), _BetaMessageStream_rejectEndPromise = new WeakMap(), _BetaMessageStream_listeners = new WeakMap(), _BetaMessageStream_ended = new WeakMap(), _BetaMessageStream_errored = new WeakMap(), _BetaMessageStream_aborted = new WeakMap(), _BetaMessageStream_catchingPromiseCreated = new WeakMap(), _BetaMessageStream_response = new WeakMap(), _BetaMessageStream_request_id = new WeakMap(), _BetaMessageStream_logger = new WeakMap(), _BetaMessageStream_handleError = new WeakMap(), _BetaMessageStream_instances = new WeakSet(), _BetaMessageStream_getFinalMessage = function _BetaMessageStream_getFinalMessage() {
|
||||
if (this.receivedMessages.length === 0) {
|
||||
throw new AnthropicError('stream ended without producing a Message with role=assistant');
|
||||
}
|
||||
return this.receivedMessages.at(-1);
|
||||
}, _BetaMessageStream_getFinalText = function _BetaMessageStream_getFinalText() {
|
||||
if (this.receivedMessages.length === 0) {
|
||||
throw new AnthropicError('stream ended without producing a Message with role=assistant');
|
||||
}
|
||||
const textBlocks = this.receivedMessages
|
||||
.at(-1)
|
||||
.content.filter((block) => block.type === 'text')
|
||||
.map((block) => block.text);
|
||||
if (textBlocks.length === 0) {
|
||||
throw new AnthropicError('stream ended without producing a content block with type=text');
|
||||
}
|
||||
return textBlocks.join(' ');
|
||||
}, _BetaMessageStream_beginRequest = function _BetaMessageStream_beginRequest() {
|
||||
if (this.ended)
|
||||
return;
|
||||
__classPrivateFieldSet(this, _BetaMessageStream_currentMessageSnapshot, undefined, "f");
|
||||
}, _BetaMessageStream_addStreamEvent = function _BetaMessageStream_addStreamEvent(event) {
|
||||
if (this.ended)
|
||||
return;
|
||||
const messageSnapshot = __classPrivateFieldGet(this, _BetaMessageStream_instances, "m", _BetaMessageStream_accumulateMessage).call(this, event);
|
||||
this._emit('streamEvent', event, messageSnapshot);
|
||||
switch (event.type) {
|
||||
case 'content_block_delta': {
|
||||
const content = messageSnapshot.content.at(-1);
|
||||
switch (event.delta.type) {
|
||||
case 'text_delta': {
|
||||
if (content.type === 'text') {
|
||||
this._emit('text', event.delta.text, content.text || '');
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'citations_delta': {
|
||||
if (content.type === 'text') {
|
||||
this._emit('citation', event.delta.citation, content.citations ?? []);
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'input_json_delta': {
|
||||
if (tracksToolInput(content) && content.input) {
|
||||
this._emit('inputJson', event.delta.partial_json, content.input);
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'thinking_delta': {
|
||||
if (content.type === 'thinking') {
|
||||
this._emit('thinking', event.delta.thinking, content.thinking);
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'signature_delta': {
|
||||
if (content.type === 'thinking') {
|
||||
this._emit('signature', content.signature);
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'compaction_delta': {
|
||||
if (content.type === 'compaction' && content.content) {
|
||||
this._emit('compaction', content.content);
|
||||
}
|
||||
break;
|
||||
}
|
||||
default:
|
||||
checkNever(event.delta);
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'message_stop': {
|
||||
this._addMessageParam(messageSnapshot);
|
||||
this._addMessage(maybeParseBetaMessage(messageSnapshot, __classPrivateFieldGet(this, _BetaMessageStream_params, "f"), { logger: __classPrivateFieldGet(this, _BetaMessageStream_logger, "f") }), true);
|
||||
break;
|
||||
}
|
||||
case 'content_block_stop': {
|
||||
this._emit('contentBlock', messageSnapshot.content.at(-1));
|
||||
break;
|
||||
}
|
||||
case 'message_start': {
|
||||
__classPrivateFieldSet(this, _BetaMessageStream_currentMessageSnapshot, messageSnapshot, "f");
|
||||
break;
|
||||
}
|
||||
case 'content_block_start':
|
||||
case 'message_delta':
|
||||
break;
|
||||
}
|
||||
}, _BetaMessageStream_endRequest = function _BetaMessageStream_endRequest() {
|
||||
if (this.ended) {
|
||||
throw new AnthropicError(`stream has ended, this shouldn't happen`);
|
||||
}
|
||||
const snapshot = __classPrivateFieldGet(this, _BetaMessageStream_currentMessageSnapshot, "f");
|
||||
if (!snapshot) {
|
||||
throw new AnthropicError(`request ended without sending any chunks`);
|
||||
}
|
||||
__classPrivateFieldSet(this, _BetaMessageStream_currentMessageSnapshot, undefined, "f");
|
||||
return maybeParseBetaMessage(snapshot, __classPrivateFieldGet(this, _BetaMessageStream_params, "f"), { logger: __classPrivateFieldGet(this, _BetaMessageStream_logger, "f") });
|
||||
}, _BetaMessageStream_accumulateMessage = function _BetaMessageStream_accumulateMessage(event) {
|
||||
let snapshot = __classPrivateFieldGet(this, _BetaMessageStream_currentMessageSnapshot, "f");
|
||||
if (event.type === 'message_start') {
|
||||
if (snapshot) {
|
||||
throw new AnthropicError(`Unexpected event order, got ${event.type} before receiving "message_stop"`);
|
||||
}
|
||||
return event.message;
|
||||
}
|
||||
if (!snapshot) {
|
||||
throw new AnthropicError(`Unexpected event order, got ${event.type} before "message_start"`);
|
||||
}
|
||||
switch (event.type) {
|
||||
case 'message_stop':
|
||||
return snapshot;
|
||||
case 'message_delta':
|
||||
snapshot.container = event.delta.container;
|
||||
snapshot.stop_reason = event.delta.stop_reason;
|
||||
snapshot.stop_sequence = event.delta.stop_sequence;
|
||||
snapshot.usage.output_tokens = event.usage.output_tokens;
|
||||
snapshot.context_management = event.context_management;
|
||||
if (event.usage.input_tokens != null) {
|
||||
snapshot.usage.input_tokens = event.usage.input_tokens;
|
||||
}
|
||||
if (event.usage.cache_creation_input_tokens != null) {
|
||||
snapshot.usage.cache_creation_input_tokens = event.usage.cache_creation_input_tokens;
|
||||
}
|
||||
if (event.usage.cache_read_input_tokens != null) {
|
||||
snapshot.usage.cache_read_input_tokens = event.usage.cache_read_input_tokens;
|
||||
}
|
||||
if (event.usage.server_tool_use != null) {
|
||||
snapshot.usage.server_tool_use = event.usage.server_tool_use;
|
||||
}
|
||||
if (event.usage.iterations != null) {
|
||||
snapshot.usage.iterations = event.usage.iterations;
|
||||
}
|
||||
return snapshot;
|
||||
case 'content_block_start':
|
||||
snapshot.content.push(event.content_block);
|
||||
return snapshot;
|
||||
case 'content_block_delta': {
|
||||
const snapshotContent = snapshot.content.at(event.index);
|
||||
switch (event.delta.type) {
|
||||
case 'text_delta': {
|
||||
if (snapshotContent?.type === 'text') {
|
||||
snapshot.content[event.index] = {
|
||||
...snapshotContent,
|
||||
text: (snapshotContent.text || '') + event.delta.text,
|
||||
};
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'citations_delta': {
|
||||
if (snapshotContent?.type === 'text') {
|
||||
snapshot.content[event.index] = {
|
||||
...snapshotContent,
|
||||
citations: [...(snapshotContent.citations ?? []), event.delta.citation],
|
||||
};
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'input_json_delta': {
|
||||
if (snapshotContent && tracksToolInput(snapshotContent)) {
|
||||
// we need to keep track of the raw JSON string as well so that we can
|
||||
// re-parse it for each delta, for now we just store it as an untyped
|
||||
// non-enumerable property on the snapshot
|
||||
let jsonBuf = snapshotContent[JSON_BUF_PROPERTY] || '';
|
||||
jsonBuf += event.delta.partial_json;
|
||||
const newContent = { ...snapshotContent };
|
||||
Object.defineProperty(newContent, JSON_BUF_PROPERTY, {
|
||||
value: jsonBuf,
|
||||
enumerable: false,
|
||||
writable: true,
|
||||
});
|
||||
if (jsonBuf) {
|
||||
try {
|
||||
newContent.input = partialParse(jsonBuf);
|
||||
}
|
||||
catch (err) {
|
||||
const error = new AnthropicError(`Unable to parse tool parameter JSON from model. Please retry your request or adjust your prompt. Error: ${err}. JSON: ${jsonBuf}`);
|
||||
__classPrivateFieldGet(this, _BetaMessageStream_handleError, "f").call(this, error);
|
||||
}
|
||||
}
|
||||
snapshot.content[event.index] = newContent;
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'thinking_delta': {
|
||||
if (snapshotContent?.type === 'thinking') {
|
||||
snapshot.content[event.index] = {
|
||||
...snapshotContent,
|
||||
thinking: snapshotContent.thinking + event.delta.thinking,
|
||||
};
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'signature_delta': {
|
||||
if (snapshotContent?.type === 'thinking') {
|
||||
snapshot.content[event.index] = {
|
||||
...snapshotContent,
|
||||
signature: event.delta.signature,
|
||||
};
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'compaction_delta': {
|
||||
if (snapshotContent?.type === 'compaction') {
|
||||
snapshot.content[event.index] = {
|
||||
...snapshotContent,
|
||||
content: (snapshotContent.content || '') + event.delta.content,
|
||||
};
|
||||
}
|
||||
break;
|
||||
}
|
||||
default:
|
||||
checkNever(event.delta);
|
||||
}
|
||||
return snapshot;
|
||||
}
|
||||
case 'content_block_stop':
|
||||
return snapshot;
|
||||
}
|
||||
}, Symbol.asyncIterator)]() {
|
||||
const pushQueue = [];
|
||||
const readQueue = [];
|
||||
let done = false;
|
||||
this.on('streamEvent', (event) => {
|
||||
const reader = readQueue.shift();
|
||||
if (reader) {
|
||||
reader.resolve(event);
|
||||
}
|
||||
else {
|
||||
pushQueue.push(event);
|
||||
}
|
||||
});
|
||||
this.on('end', () => {
|
||||
done = true;
|
||||
for (const reader of readQueue) {
|
||||
reader.resolve(undefined);
|
||||
}
|
||||
readQueue.length = 0;
|
||||
});
|
||||
this.on('abort', (err) => {
|
||||
done = true;
|
||||
for (const reader of readQueue) {
|
||||
reader.reject(err);
|
||||
}
|
||||
readQueue.length = 0;
|
||||
});
|
||||
this.on('error', (err) => {
|
||||
done = true;
|
||||
for (const reader of readQueue) {
|
||||
reader.reject(err);
|
||||
}
|
||||
readQueue.length = 0;
|
||||
});
|
||||
return {
|
||||
next: async () => {
|
||||
if (!pushQueue.length) {
|
||||
if (done) {
|
||||
return { value: undefined, done: true };
|
||||
}
|
||||
return new Promise((resolve, reject) => readQueue.push({ resolve, reject })).then((chunk) => (chunk ? { value: chunk, done: false } : { value: undefined, done: true }));
|
||||
}
|
||||
const chunk = pushQueue.shift();
|
||||
return { value: chunk, done: false };
|
||||
},
|
||||
return: async () => {
|
||||
this.abort();
|
||||
return { value: undefined, done: true };
|
||||
},
|
||||
};
|
||||
}
|
||||
toReadableStream() {
|
||||
const stream = new Stream(this[Symbol.asyncIterator].bind(this), this.controller);
|
||||
return stream.toReadableStream();
|
||||
}
|
||||
}
|
||||
// used to ensure exhaustive case matching without throwing a runtime error
|
||||
function checkNever(x) { }
|
||||
//# sourceMappingURL=BetaMessageStream.mjs.map
|
||||
591
extracted-source/node_modules/@anthropic-ai/sdk/lib/MessageStream.mjs
generated
vendored
Normal file
591
extracted-source/node_modules/@anthropic-ai/sdk/lib/MessageStream.mjs
generated
vendored
Normal file
@@ -0,0 +1,591 @@
|
||||
var _MessageStream_instances, _MessageStream_currentMessageSnapshot, _MessageStream_params, _MessageStream_connectedPromise, _MessageStream_resolveConnectedPromise, _MessageStream_rejectConnectedPromise, _MessageStream_endPromise, _MessageStream_resolveEndPromise, _MessageStream_rejectEndPromise, _MessageStream_listeners, _MessageStream_ended, _MessageStream_errored, _MessageStream_aborted, _MessageStream_catchingPromiseCreated, _MessageStream_response, _MessageStream_request_id, _MessageStream_logger, _MessageStream_getFinalMessage, _MessageStream_getFinalText, _MessageStream_handleError, _MessageStream_beginRequest, _MessageStream_addStreamEvent, _MessageStream_endRequest, _MessageStream_accumulateMessage;
|
||||
import { __classPrivateFieldGet, __classPrivateFieldSet } from "../internal/tslib.mjs";
|
||||
import { isAbortError } from "../internal/errors.mjs";
|
||||
import { AnthropicError, APIUserAbortError } from "../error.mjs";
|
||||
import { Stream } from "../streaming.mjs";
|
||||
import { partialParse } from "../_vendor/partial-json-parser/parser.mjs";
|
||||
import { maybeParseMessage } from "./parser.mjs";
|
||||
const JSON_BUF_PROPERTY = '__json_buf';
|
||||
function tracksToolInput(content) {
|
||||
return content.type === 'tool_use' || content.type === 'server_tool_use';
|
||||
}
|
||||
export class MessageStream {
|
||||
constructor(params, opts) {
|
||||
_MessageStream_instances.add(this);
|
||||
this.messages = [];
|
||||
this.receivedMessages = [];
|
||||
_MessageStream_currentMessageSnapshot.set(this, void 0);
|
||||
_MessageStream_params.set(this, null);
|
||||
this.controller = new AbortController();
|
||||
_MessageStream_connectedPromise.set(this, void 0);
|
||||
_MessageStream_resolveConnectedPromise.set(this, () => { });
|
||||
_MessageStream_rejectConnectedPromise.set(this, () => { });
|
||||
_MessageStream_endPromise.set(this, void 0);
|
||||
_MessageStream_resolveEndPromise.set(this, () => { });
|
||||
_MessageStream_rejectEndPromise.set(this, () => { });
|
||||
_MessageStream_listeners.set(this, {});
|
||||
_MessageStream_ended.set(this, false);
|
||||
_MessageStream_errored.set(this, false);
|
||||
_MessageStream_aborted.set(this, false);
|
||||
_MessageStream_catchingPromiseCreated.set(this, false);
|
||||
_MessageStream_response.set(this, void 0);
|
||||
_MessageStream_request_id.set(this, void 0);
|
||||
_MessageStream_logger.set(this, void 0);
|
||||
_MessageStream_handleError.set(this, (error) => {
|
||||
__classPrivateFieldSet(this, _MessageStream_errored, true, "f");
|
||||
if (isAbortError(error)) {
|
||||
error = new APIUserAbortError();
|
||||
}
|
||||
if (error instanceof APIUserAbortError) {
|
||||
__classPrivateFieldSet(this, _MessageStream_aborted, true, "f");
|
||||
return this._emit('abort', error);
|
||||
}
|
||||
if (error instanceof AnthropicError) {
|
||||
return this._emit('error', error);
|
||||
}
|
||||
if (error instanceof Error) {
|
||||
const anthropicError = new AnthropicError(error.message);
|
||||
// @ts-ignore
|
||||
anthropicError.cause = error;
|
||||
return this._emit('error', anthropicError);
|
||||
}
|
||||
return this._emit('error', new AnthropicError(String(error)));
|
||||
});
|
||||
__classPrivateFieldSet(this, _MessageStream_connectedPromise, new Promise((resolve, reject) => {
|
||||
__classPrivateFieldSet(this, _MessageStream_resolveConnectedPromise, resolve, "f");
|
||||
__classPrivateFieldSet(this, _MessageStream_rejectConnectedPromise, reject, "f");
|
||||
}), "f");
|
||||
__classPrivateFieldSet(this, _MessageStream_endPromise, new Promise((resolve, reject) => {
|
||||
__classPrivateFieldSet(this, _MessageStream_resolveEndPromise, resolve, "f");
|
||||
__classPrivateFieldSet(this, _MessageStream_rejectEndPromise, reject, "f");
|
||||
}), "f");
|
||||
// Don't let these promises cause unhandled rejection errors.
|
||||
// we will manually cause an unhandled rejection error later
|
||||
// if the user hasn't registered any error listener or called
|
||||
// any promise-returning method.
|
||||
__classPrivateFieldGet(this, _MessageStream_connectedPromise, "f").catch(() => { });
|
||||
__classPrivateFieldGet(this, _MessageStream_endPromise, "f").catch(() => { });
|
||||
__classPrivateFieldSet(this, _MessageStream_params, params, "f");
|
||||
__classPrivateFieldSet(this, _MessageStream_logger, opts?.logger ?? console, "f");
|
||||
}
|
||||
get response() {
|
||||
return __classPrivateFieldGet(this, _MessageStream_response, "f");
|
||||
}
|
||||
get request_id() {
|
||||
return __classPrivateFieldGet(this, _MessageStream_request_id, "f");
|
||||
}
|
||||
/**
|
||||
* Returns the `MessageStream` data, the raw `Response` instance and the ID of the request,
|
||||
* returned vie the `request-id` header which is useful for debugging requests and resporting
|
||||
* issues to Anthropic.
|
||||
*
|
||||
* This is the same as the `APIPromise.withResponse()` method.
|
||||
*
|
||||
* This method will raise an error if you created the stream using `MessageStream.fromReadableStream`
|
||||
* as no `Response` is available.
|
||||
*/
|
||||
async withResponse() {
|
||||
__classPrivateFieldSet(this, _MessageStream_catchingPromiseCreated, true, "f");
|
||||
const response = await __classPrivateFieldGet(this, _MessageStream_connectedPromise, "f");
|
||||
if (!response) {
|
||||
throw new Error('Could not resolve a `Response` object');
|
||||
}
|
||||
return {
|
||||
data: this,
|
||||
response,
|
||||
request_id: response.headers.get('request-id'),
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Intended for use on the frontend, consuming a stream produced with
|
||||
* `.toReadableStream()` on the backend.
|
||||
*
|
||||
* Note that messages sent to the model do not appear in `.on('message')`
|
||||
* in this context.
|
||||
*/
|
||||
static fromReadableStream(stream) {
|
||||
const runner = new MessageStream(null);
|
||||
runner._run(() => runner._fromReadableStream(stream));
|
||||
return runner;
|
||||
}
|
||||
static createMessage(messages, params, options, { logger } = {}) {
|
||||
const runner = new MessageStream(params, { logger });
|
||||
for (const message of params.messages) {
|
||||
runner._addMessageParam(message);
|
||||
}
|
||||
__classPrivateFieldSet(runner, _MessageStream_params, { ...params, stream: true }, "f");
|
||||
runner._run(() => runner._createMessage(messages, { ...params, stream: true }, { ...options, headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'stream' } }));
|
||||
return runner;
|
||||
}
|
||||
_run(executor) {
|
||||
executor().then(() => {
|
||||
this._emitFinal();
|
||||
this._emit('end');
|
||||
}, __classPrivateFieldGet(this, _MessageStream_handleError, "f"));
|
||||
}
|
||||
_addMessageParam(message) {
|
||||
this.messages.push(message);
|
||||
}
|
||||
_addMessage(message, emit = true) {
|
||||
this.receivedMessages.push(message);
|
||||
if (emit) {
|
||||
this._emit('message', message);
|
||||
}
|
||||
}
|
||||
async _createMessage(messages, params, options) {
|
||||
const signal = options?.signal;
|
||||
let abortHandler;
|
||||
if (signal) {
|
||||
if (signal.aborted)
|
||||
this.controller.abort();
|
||||
abortHandler = this.controller.abort.bind(this.controller);
|
||||
signal.addEventListener('abort', abortHandler);
|
||||
}
|
||||
try {
|
||||
__classPrivateFieldGet(this, _MessageStream_instances, "m", _MessageStream_beginRequest).call(this);
|
||||
const { response, data: stream } = await messages
|
||||
.create({ ...params, stream: true }, { ...options, signal: this.controller.signal })
|
||||
.withResponse();
|
||||
this._connected(response);
|
||||
for await (const event of stream) {
|
||||
__classPrivateFieldGet(this, _MessageStream_instances, "m", _MessageStream_addStreamEvent).call(this, event);
|
||||
}
|
||||
if (stream.controller.signal?.aborted) {
|
||||
throw new APIUserAbortError();
|
||||
}
|
||||
__classPrivateFieldGet(this, _MessageStream_instances, "m", _MessageStream_endRequest).call(this);
|
||||
}
|
||||
finally {
|
||||
if (signal && abortHandler) {
|
||||
signal.removeEventListener('abort', abortHandler);
|
||||
}
|
||||
}
|
||||
}
|
||||
_connected(response) {
|
||||
if (this.ended)
|
||||
return;
|
||||
__classPrivateFieldSet(this, _MessageStream_response, response, "f");
|
||||
__classPrivateFieldSet(this, _MessageStream_request_id, response?.headers.get('request-id'), "f");
|
||||
__classPrivateFieldGet(this, _MessageStream_resolveConnectedPromise, "f").call(this, response);
|
||||
this._emit('connect');
|
||||
}
|
||||
get ended() {
|
||||
return __classPrivateFieldGet(this, _MessageStream_ended, "f");
|
||||
}
|
||||
get errored() {
|
||||
return __classPrivateFieldGet(this, _MessageStream_errored, "f");
|
||||
}
|
||||
get aborted() {
|
||||
return __classPrivateFieldGet(this, _MessageStream_aborted, "f");
|
||||
}
|
||||
abort() {
|
||||
this.controller.abort();
|
||||
}
|
||||
/**
|
||||
* Adds the listener function to the end of the listeners array for the event.
|
||||
* No checks are made to see if the listener has already been added. Multiple calls passing
|
||||
* the same combination of event and listener will result in the listener being added, and
|
||||
* called, multiple times.
|
||||
* @returns this MessageStream, so that calls can be chained
|
||||
*/
|
||||
on(event, listener) {
|
||||
const listeners = __classPrivateFieldGet(this, _MessageStream_listeners, "f")[event] || (__classPrivateFieldGet(this, _MessageStream_listeners, "f")[event] = []);
|
||||
listeners.push({ listener });
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* Removes the specified listener from the listener array for the event.
|
||||
* off() will remove, at most, one instance of a listener from the listener array. If any single
|
||||
* listener has been added multiple times to the listener array for the specified event, then
|
||||
* off() must be called multiple times to remove each instance.
|
||||
* @returns this MessageStream, so that calls can be chained
|
||||
*/
|
||||
off(event, listener) {
|
||||
const listeners = __classPrivateFieldGet(this, _MessageStream_listeners, "f")[event];
|
||||
if (!listeners)
|
||||
return this;
|
||||
const index = listeners.findIndex((l) => l.listener === listener);
|
||||
if (index >= 0)
|
||||
listeners.splice(index, 1);
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* Adds a one-time listener function for the event. The next time the event is triggered,
|
||||
* this listener is removed and then invoked.
|
||||
* @returns this MessageStream, so that calls can be chained
|
||||
*/
|
||||
once(event, listener) {
|
||||
const listeners = __classPrivateFieldGet(this, _MessageStream_listeners, "f")[event] || (__classPrivateFieldGet(this, _MessageStream_listeners, "f")[event] = []);
|
||||
listeners.push({ listener, once: true });
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* This is similar to `.once()`, but returns a Promise that resolves the next time
|
||||
* the event is triggered, instead of calling a listener callback.
|
||||
* @returns a Promise that resolves the next time given event is triggered,
|
||||
* or rejects if an error is emitted. (If you request the 'error' event,
|
||||
* returns a promise that resolves with the error).
|
||||
*
|
||||
* Example:
|
||||
*
|
||||
* const message = await stream.emitted('message') // rejects if the stream errors
|
||||
*/
|
||||
emitted(event) {
|
||||
return new Promise((resolve, reject) => {
|
||||
__classPrivateFieldSet(this, _MessageStream_catchingPromiseCreated, true, "f");
|
||||
if (event !== 'error')
|
||||
this.once('error', reject);
|
||||
this.once(event, resolve);
|
||||
});
|
||||
}
|
||||
async done() {
|
||||
__classPrivateFieldSet(this, _MessageStream_catchingPromiseCreated, true, "f");
|
||||
await __classPrivateFieldGet(this, _MessageStream_endPromise, "f");
|
||||
}
|
||||
get currentMessage() {
|
||||
return __classPrivateFieldGet(this, _MessageStream_currentMessageSnapshot, "f");
|
||||
}
|
||||
/**
|
||||
* @returns a promise that resolves with the the final assistant Message response,
|
||||
* or rejects if an error occurred or the stream ended prematurely without producing a Message.
|
||||
* If structured outputs were used, this will be a ParsedMessage with a `parsed_output` field.
|
||||
*/
|
||||
async finalMessage() {
|
||||
await this.done();
|
||||
return __classPrivateFieldGet(this, _MessageStream_instances, "m", _MessageStream_getFinalMessage).call(this);
|
||||
}
|
||||
/**
|
||||
* @returns a promise that resolves with the the final assistant Message's text response, concatenated
|
||||
* together if there are more than one text blocks.
|
||||
* Rejects if an error occurred or the stream ended prematurely without producing a Message.
|
||||
*/
|
||||
async finalText() {
|
||||
await this.done();
|
||||
return __classPrivateFieldGet(this, _MessageStream_instances, "m", _MessageStream_getFinalText).call(this);
|
||||
}
|
||||
_emit(event, ...args) {
|
||||
// make sure we don't emit any MessageStreamEvents after end
|
||||
if (__classPrivateFieldGet(this, _MessageStream_ended, "f"))
|
||||
return;
|
||||
if (event === 'end') {
|
||||
__classPrivateFieldSet(this, _MessageStream_ended, true, "f");
|
||||
__classPrivateFieldGet(this, _MessageStream_resolveEndPromise, "f").call(this);
|
||||
}
|
||||
const listeners = __classPrivateFieldGet(this, _MessageStream_listeners, "f")[event];
|
||||
if (listeners) {
|
||||
__classPrivateFieldGet(this, _MessageStream_listeners, "f")[event] = listeners.filter((l) => !l.once);
|
||||
listeners.forEach(({ listener }) => listener(...args));
|
||||
}
|
||||
if (event === 'abort') {
|
||||
const error = args[0];
|
||||
if (!__classPrivateFieldGet(this, _MessageStream_catchingPromiseCreated, "f") && !listeners?.length) {
|
||||
Promise.reject(error);
|
||||
}
|
||||
__classPrivateFieldGet(this, _MessageStream_rejectConnectedPromise, "f").call(this, error);
|
||||
__classPrivateFieldGet(this, _MessageStream_rejectEndPromise, "f").call(this, error);
|
||||
this._emit('end');
|
||||
return;
|
||||
}
|
||||
if (event === 'error') {
|
||||
// NOTE: _emit('error', error) should only be called from #handleError().
|
||||
const error = args[0];
|
||||
if (!__classPrivateFieldGet(this, _MessageStream_catchingPromiseCreated, "f") && !listeners?.length) {
|
||||
// Trigger an unhandled rejection if the user hasn't registered any error handlers.
|
||||
// If you are seeing stack traces here, make sure to handle errors via either:
|
||||
// - runner.on('error', () => ...)
|
||||
// - await runner.done()
|
||||
// - await runner.final...()
|
||||
// - etc.
|
||||
Promise.reject(error);
|
||||
}
|
||||
__classPrivateFieldGet(this, _MessageStream_rejectConnectedPromise, "f").call(this, error);
|
||||
__classPrivateFieldGet(this, _MessageStream_rejectEndPromise, "f").call(this, error);
|
||||
this._emit('end');
|
||||
}
|
||||
}
|
||||
_emitFinal() {
|
||||
const finalMessage = this.receivedMessages.at(-1);
|
||||
if (finalMessage) {
|
||||
this._emit('finalMessage', __classPrivateFieldGet(this, _MessageStream_instances, "m", _MessageStream_getFinalMessage).call(this));
|
||||
}
|
||||
}
|
||||
async _fromReadableStream(readableStream, options) {
    // Drives this MessageStream from a raw ReadableStream of stream events
    // (e.g. one produced by `toReadableStream()` on another stream).
    const signal = options?.signal;
    let abortHandler;
    if (signal) {
        if (signal.aborted)
            this.controller.abort();
        // Forward caller-side aborts into our own AbortController.
        abortHandler = this.controller.abort.bind(this.controller);
        signal.addEventListener('abort', abortHandler);
    }
    try {
        __classPrivateFieldGet(this, _MessageStream_instances, "m", _MessageStream_beginRequest).call(this);
        // There is no HTTP response object in this mode; report `null`.
        this._connected(null);
        const stream = Stream.fromReadableStream(readableStream, this.controller);
        for await (const event of stream) {
            __classPrivateFieldGet(this, _MessageStream_instances, "m", _MessageStream_addStreamEvent).call(this, event);
        }
        if (stream.controller.signal?.aborted) {
            throw new APIUserAbortError();
        }
        __classPrivateFieldGet(this, _MessageStream_instances, "m", _MessageStream_endRequest).call(this);
    }
    finally {
        // Always detach our handler so the caller's signal does not retain us.
        if (signal && abortHandler) {
            signal.removeEventListener('abort', abortHandler);
        }
    }
}
|
||||
// The computed member name below doubles (via the comma operator, a tslib
// down-leveling artifact) as the initialization site for all of the class's
// private fields and private methods; the member itself is
// `[Symbol.asyncIterator]()`.
[(_MessageStream_currentMessageSnapshot = new WeakMap(), _MessageStream_params = new WeakMap(), _MessageStream_connectedPromise = new WeakMap(), _MessageStream_resolveConnectedPromise = new WeakMap(), _MessageStream_rejectConnectedPromise = new WeakMap(), _MessageStream_endPromise = new WeakMap(), _MessageStream_resolveEndPromise = new WeakMap(), _MessageStream_rejectEndPromise = new WeakMap(), _MessageStream_listeners = new WeakMap(), _MessageStream_ended = new WeakMap(), _MessageStream_errored = new WeakMap(), _MessageStream_aborted = new WeakMap(), _MessageStream_catchingPromiseCreated = new WeakMap(), _MessageStream_response = new WeakMap(), _MessageStream_request_id = new WeakMap(), _MessageStream_logger = new WeakMap(), _MessageStream_handleError = new WeakMap(), _MessageStream_instances = new WeakSet(), _MessageStream_getFinalMessage = function _MessageStream_getFinalMessage() {
    // Returns the last fully-received assistant Message, or throws.
    if (this.receivedMessages.length === 0) {
        throw new AnthropicError('stream ended without producing a Message with role=assistant');
    }
    return this.receivedMessages.at(-1);
}, _MessageStream_getFinalText = function _MessageStream_getFinalText() {
    // Joins the text of every `text` content block in the last message.
    if (this.receivedMessages.length === 0) {
        throw new AnthropicError('stream ended without producing a Message with role=assistant');
    }
    const textBlocks = this.receivedMessages
        .at(-1)
        .content.filter((block) => block.type === 'text')
        .map((block) => block.text);
    if (textBlocks.length === 0) {
        throw new AnthropicError('stream ended without producing a content block with type=text');
    }
    return textBlocks.join(' ');
}, _MessageStream_beginRequest = function _MessageStream_beginRequest() {
    // Clears the per-request snapshot before a new request starts streaming.
    if (this.ended)
        return;
    __classPrivateFieldSet(this, _MessageStream_currentMessageSnapshot, undefined, "f");
}, _MessageStream_addStreamEvent = function _MessageStream_addStreamEvent(event) {
    // Folds one stream event into the running message snapshot, then fires
    // the corresponding public events ('streamEvent', 'text', 'thinking', ...).
    if (this.ended)
        return;
    const messageSnapshot = __classPrivateFieldGet(this, _MessageStream_instances, "m", _MessageStream_accumulateMessage).call(this, event);
    this._emit('streamEvent', event, messageSnapshot);
    switch (event.type) {
        case 'content_block_delta': {
            const content = messageSnapshot.content.at(-1);
            switch (event.delta.type) {
                case 'text_delta': {
                    if (content.type === 'text') {
                        this._emit('text', event.delta.text, content.text || '');
                    }
                    break;
                }
                case 'citations_delta': {
                    if (content.type === 'text') {
                        this._emit('citation', event.delta.citation, content.citations ?? []);
                    }
                    break;
                }
                case 'input_json_delta': {
                    if (tracksToolInput(content) && content.input) {
                        this._emit('inputJson', event.delta.partial_json, content.input);
                    }
                    break;
                }
                case 'thinking_delta': {
                    if (content.type === 'thinking') {
                        this._emit('thinking', event.delta.thinking, content.thinking);
                    }
                    break;
                }
                case 'signature_delta': {
                    if (content.type === 'thinking') {
                        this._emit('signature', content.signature);
                    }
                    break;
                }
                default:
                    checkNever(event.delta);
            }
            break;
        }
        case 'message_stop': {
            // A complete assistant message: record it both as a params-style
            // message and (possibly parsed) as a received Message.
            this._addMessageParam(messageSnapshot);
            this._addMessage(maybeParseMessage(messageSnapshot, __classPrivateFieldGet(this, _MessageStream_params, "f"), { logger: __classPrivateFieldGet(this, _MessageStream_logger, "f") }), true);
            break;
        }
        case 'content_block_stop': {
            this._emit('contentBlock', messageSnapshot.content.at(-1));
            break;
        }
        case 'message_start': {
            __classPrivateFieldSet(this, _MessageStream_currentMessageSnapshot, messageSnapshot, "f");
            break;
        }
        case 'content_block_start':
        case 'message_delta':
            break;
    }
}, _MessageStream_endRequest = function _MessageStream_endRequest() {
    // Finalizes the current request and returns its (possibly parsed) message.
    if (this.ended) {
        throw new AnthropicError(`stream has ended, this shouldn't happen`);
    }
    const snapshot = __classPrivateFieldGet(this, _MessageStream_currentMessageSnapshot, "f");
    if (!snapshot) {
        throw new AnthropicError(`request ended without sending any chunks`);
    }
    __classPrivateFieldSet(this, _MessageStream_currentMessageSnapshot, undefined, "f");
    return maybeParseMessage(snapshot, __classPrivateFieldGet(this, _MessageStream_params, "f"), { logger: __classPrivateFieldGet(this, _MessageStream_logger, "f") });
}, _MessageStream_accumulateMessage = function _MessageStream_accumulateMessage(event) {
    // Merges `event` into the current message snapshot and returns it,
    // enforcing message_start -> ... -> message_stop ordering.
    let snapshot = __classPrivateFieldGet(this, _MessageStream_currentMessageSnapshot, "f");
    if (event.type === 'message_start') {
        if (snapshot) {
            throw new AnthropicError(`Unexpected event order, got ${event.type} before receiving "message_stop"`);
        }
        return event.message;
    }
    if (!snapshot) {
        throw new AnthropicError(`Unexpected event order, got ${event.type} before "message_start"`);
    }
    switch (event.type) {
        case 'message_stop':
            return snapshot;
        case 'message_delta':
            snapshot.stop_reason = event.delta.stop_reason;
            snapshot.stop_sequence = event.delta.stop_sequence;
            snapshot.usage.output_tokens = event.usage.output_tokens;
            // Update other usage fields if they exist in the event
            if (event.usage.input_tokens != null) {
                snapshot.usage.input_tokens = event.usage.input_tokens;
            }
            if (event.usage.cache_creation_input_tokens != null) {
                snapshot.usage.cache_creation_input_tokens = event.usage.cache_creation_input_tokens;
            }
            if (event.usage.cache_read_input_tokens != null) {
                snapshot.usage.cache_read_input_tokens = event.usage.cache_read_input_tokens;
            }
            if (event.usage.server_tool_use != null) {
                snapshot.usage.server_tool_use = event.usage.server_tool_use;
            }
            return snapshot;
        case 'content_block_start':
            snapshot.content.push({ ...event.content_block });
            return snapshot;
        case 'content_block_delta': {
            const snapshotContent = snapshot.content.at(event.index);
            switch (event.delta.type) {
                case 'text_delta': {
                    if (snapshotContent?.type === 'text') {
                        snapshot.content[event.index] = {
                            ...snapshotContent,
                            text: (snapshotContent.text || '') + event.delta.text,
                        };
                    }
                    break;
                }
                case 'citations_delta': {
                    if (snapshotContent?.type === 'text') {
                        snapshot.content[event.index] = {
                            ...snapshotContent,
                            citations: [...(snapshotContent.citations ?? []), event.delta.citation],
                        };
                    }
                    break;
                }
                case 'input_json_delta': {
                    if (snapshotContent && tracksToolInput(snapshotContent)) {
                        // we need to keep track of the raw JSON string as well so that we can
                        // re-parse it for each delta, for now we just store it as an untyped
                        // non-enumerable property on the snapshot
                        let jsonBuf = snapshotContent[JSON_BUF_PROPERTY] || '';
                        jsonBuf += event.delta.partial_json;
                        const newContent = { ...snapshotContent };
                        Object.defineProperty(newContent, JSON_BUF_PROPERTY, {
                            value: jsonBuf,
                            enumerable: false,
                            writable: true,
                        });
                        if (jsonBuf) {
                            newContent.input = partialParse(jsonBuf);
                        }
                        snapshot.content[event.index] = newContent;
                    }
                    break;
                }
                case 'thinking_delta': {
                    if (snapshotContent?.type === 'thinking') {
                        snapshot.content[event.index] = {
                            ...snapshotContent,
                            thinking: snapshotContent.thinking + event.delta.thinking,
                        };
                    }
                    break;
                }
                case 'signature_delta': {
                    if (snapshotContent?.type === 'thinking') {
                        snapshot.content[event.index] = {
                            ...snapshotContent,
                            signature: event.delta.signature,
                        };
                    }
                    break;
                }
                default:
                    checkNever(event.delta);
            }
            return snapshot;
        }
        case 'content_block_stop':
            return snapshot;
    }
}, Symbol.asyncIterator)]() {
    // Bridges the push-based listener API to pull-based async iteration:
    // events are buffered in `pushQueue` when no reader is waiting, and
    // pending readers in `readQueue` are settled as events/end/errors arrive.
    const pushQueue = [];
    const readQueue = [];
    let done = false;
    this.on('streamEvent', (event) => {
        const reader = readQueue.shift();
        if (reader) {
            reader.resolve(event);
        }
        else {
            pushQueue.push(event);
        }
    });
    this.on('end', () => {
        done = true;
        for (const reader of readQueue) {
            reader.resolve(undefined);
        }
        readQueue.length = 0;
    });
    this.on('abort', (err) => {
        done = true;
        for (const reader of readQueue) {
            reader.reject(err);
        }
        readQueue.length = 0;
    });
    this.on('error', (err) => {
        done = true;
        for (const reader of readQueue) {
            reader.reject(err);
        }
        readQueue.length = 0;
    });
    return {
        next: async () => {
            if (!pushQueue.length) {
                if (done) {
                    return { value: undefined, done: true };
                }
                // No buffered event: park a resolver until one arrives; an
                // `undefined` chunk signals the end of the stream.
                return new Promise((resolve, reject) => readQueue.push({ resolve, reject })).then((chunk) => (chunk ? { value: chunk, done: false } : { value: undefined, done: true }));
            }
            const chunk = pushQueue.shift();
            return { value: chunk, done: false };
        },
        return: async () => {
            // Breaking out of a for-await loop aborts the underlying request.
            this.abort();
            return { value: undefined, done: true };
        },
    };
}
|
||||
toReadableStream() {
|
||||
const stream = new Stream(this[Symbol.asyncIterator].bind(this), this.controller);
|
||||
return stream.toReadableStream();
|
||||
}
|
||||
}
|
||||
// Exhaustiveness helper: the switch statements above funnel their `default`
// branch here so completeness can be checked without a runtime throw.
function checkNever(_value) { }
|
||||
//# sourceMappingURL=MessageStream.mjs.map
|
||||
75
extracted-source/node_modules/@anthropic-ai/sdk/lib/beta-parser.mjs
generated
vendored
Normal file
75
extracted-source/node_modules/@anthropic-ai/sdk/lib/beta-parser.mjs
generated
vendored
Normal file
@@ -0,0 +1,75 @@
|
||||
import { AnthropicError } from "../core/error.mjs";
|
||||
/**
 * Resolve the effective output format from request params.
 * The deprecated top-level `output_format` takes precedence over
 * `output_config.format` so older callers keep their behavior.
 */
function getOutputFormat(params) {
    const legacyFormat = params?.output_format;
    if (legacyFormat != null) {
        return legacyFormat;
    }
    return params?.output_config?.format;
}
|
||||
/**
 * Parse `message` when a custom `parse` function is configured on the output
 * format; otherwise annotate every text block with a hidden null
 * `parsed_output` plus a deprecated `parsed` getter that warns on access.
 */
export function maybeParseBetaMessage(message, params, opts) {
    const outputFormat = getOutputFormat(params);
    const canParse = Boolean(params) && 'parse' in (outputFormat ?? {});
    if (canParse) {
        return parseBetaMessage(message, params, opts);
    }
    const content = message.content.map((block) => {
        if (block.type !== 'text') {
            return block;
        }
        const annotated = Object.defineProperty({ ...block }, 'parsed_output', {
            value: null,
            enumerable: false,
        });
        return Object.defineProperty(annotated, 'parsed', {
            get() {
                opts.logger.warn('The `parsed` property on `text` blocks is deprecated, please use `parsed_output` instead.');
                return null;
            },
            enumerable: false,
        });
    });
    return { ...message, content, parsed_output: null };
}
|
||||
/**
 * Parse every text block against the configured output format, attaching the
 * result as a hidden `parsed_output` property (and a deprecated, warning
 * `parsed` getter). The message-level `parsed_output` mirrors the first text
 * block's parse result.
 */
export function parseBetaMessage(message, params, opts) {
    let firstParsedOutput = null;
    const content = message.content.map((block) => {
        if (block.type !== 'text') {
            return block;
        }
        const parsedOutput = parseBetaOutputFormat(params, block.text);
        if (firstParsedOutput === null) {
            firstParsedOutput = parsedOutput;
        }
        const annotated = Object.defineProperty({ ...block }, 'parsed_output', {
            value: parsedOutput,
            enumerable: false,
        });
        return Object.defineProperty(annotated, 'parsed', {
            get() {
                opts.logger.warn('The `parsed` property on `text` blocks is deprecated, please use `parsed_output` instead.');
                return parsedOutput;
            },
            enumerable: false,
        });
    });
    return { ...message, content, parsed_output: firstParsedOutput };
}
|
||||
/**
 * Parse one text block's content against the configured output format.
 * Returns null unless a `json_schema` output format is configured; any
 * parse failure is wrapped in an AnthropicError.
 */
function parseBetaOutputFormat(params, content) {
    const outputFormat = getOutputFormat(params);
    if (outputFormat?.type !== 'json_schema') {
        return null;
    }
    try {
        // A user-supplied `parse` function wins over plain JSON parsing.
        return 'parse' in outputFormat ? outputFormat.parse(content) : JSON.parse(content);
    }
    catch (error) {
        throw new AnthropicError(`Failed to parse structured output: ${error}`);
    }
}
|
||||
//# sourceMappingURL=beta-parser.mjs.map
|
||||
62
extracted-source/node_modules/@anthropic-ai/sdk/lib/parser.mjs
generated
vendored
Normal file
62
extracted-source/node_modules/@anthropic-ai/sdk/lib/parser.mjs
generated
vendored
Normal file
@@ -0,0 +1,62 @@
|
||||
import { AnthropicError } from "../core/error.mjs";
|
||||
/** Pull the structured-output format, if any, out of the request params. */
function getOutputFormat(params) {
    const outputConfig = params?.output_config;
    return outputConfig?.format;
}
|
||||
/**
 * Parse `message` when a custom `parse` function is configured on the output
 * format; otherwise tag each text block with a hidden null `parsed_output`.
 */
export function maybeParseMessage(message, params, opts) {
    const outputFormat = getOutputFormat(params);
    const canParse = Boolean(params) && 'parse' in (outputFormat ?? {});
    if (canParse) {
        return parseMessage(message, params, opts);
    }
    const content = message.content.map((block) => {
        if (block.type !== 'text') {
            return block;
        }
        return Object.defineProperty({ ...block }, 'parsed_output', {
            value: null,
            enumerable: false,
        });
    });
    return { ...message, content, parsed_output: null };
}
|
||||
/**
 * Parse every text block against the configured output format, attaching the
 * result as a hidden `parsed_output` property; the message-level
 * `parsed_output` mirrors the first text block's parse result.
 */
export function parseMessage(message, params, opts) {
    let firstParsedOutput = null;
    const content = message.content.map((block) => {
        if (block.type !== 'text') {
            return block;
        }
        const parsedOutput = parseOutputFormat(params, block.text);
        if (firstParsedOutput === null) {
            firstParsedOutput = parsedOutput;
        }
        return Object.defineProperty({ ...block }, 'parsed_output', {
            value: parsedOutput,
            enumerable: false,
        });
    });
    return { ...message, content, parsed_output: firstParsedOutput };
}
|
||||
/**
 * Parse one text block's content against the configured output format.
 * Returns null unless a `json_schema` output format is configured; any
 * parse failure is wrapped in an AnthropicError.
 */
function parseOutputFormat(params, content) {
    const outputFormat = getOutputFormat(params);
    if (outputFormat?.type !== 'json_schema') {
        return null;
    }
    try {
        // A user-supplied `parse` function wins over plain JSON parsing.
        return 'parse' in outputFormat ? outputFormat.parse(content) : JSON.parse(content);
    }
    catch (error) {
        throw new AnthropicError(`Failed to parse structured output: ${error}`);
    }
}
|
||||
//# sourceMappingURL=parser.mjs.map
|
||||
63
extracted-source/node_modules/@anthropic-ai/sdk/lib/stainless-helper-header.mjs
generated
vendored
Normal file
63
extracted-source/node_modules/@anthropic-ai/sdk/lib/stainless-helper-header.mjs
generated
vendored
Normal file
@@ -0,0 +1,63 @@
|
||||
/**
|
||||
* Shared utilities for tracking SDK helper usage.
|
||||
*/
|
||||
/**
 * Marker symbol attached to objects produced by SDK helpers for usage
 * tracking; its value is the helper name (e.g., 'mcpTool', 'betaZodTool').
 */
export const SDK_HELPER_SYMBOL = Symbol('anthropic.sdk.stainlessHelper');
/** True when `value` is a non-null object carrying the helper marker symbol. */
export function wasCreatedByStainlessHelper(value) {
    if (value === null || typeof value !== 'object') {
        return false;
    }
    return SDK_HELPER_SYMBOL in value;
}
|
||||
/**
 * Collects helper names from tools and messages arrays.
 * Returns a deduplicated array of helper names found.
 */
export function collectStainlessHelpers(tools, messages) {
    const found = new Set();
    const record = (candidate) => {
        if (wasCreatedByStainlessHelper(candidate)) {
            found.add(candidate[SDK_HELPER_SYMBOL]);
        }
    };
    if (tools) {
        for (const tool of tools) {
            record(tool);
        }
    }
    if (messages) {
        for (const message of messages) {
            record(message);
            // Content may be a plain string; only arrays hold blocks to inspect.
            if (Array.isArray(message.content)) {
                for (const block of message.content) {
                    record(block);
                }
            }
        }
    }
    return [...found];
}
|
||||
/**
 * Builds x-stainless-helper header value from tools and messages.
 * Returns an empty object if no helpers are found.
 */
export function stainlessHelperHeader(tools, messages) {
    const helperNames = collectStainlessHelpers(tools, messages);
    return helperNames.length === 0 ? {} : { 'x-stainless-helper': helperNames.join(', ') };
}
|
||||
/**
 * Builds x-stainless-helper header value from a file object.
 * Returns an empty object if the file is not marked with a helper.
 */
export function stainlessHelperHeaderFromFile(file) {
    if (!wasCreatedByStainlessHelper(file)) {
        return {};
    }
    return { 'x-stainless-helper': file[SDK_HELPER_SYMBOL] };
}
|
||||
//# sourceMappingURL=stainless-helper-header.mjs.map
|
||||
372
extracted-source/node_modules/@anthropic-ai/sdk/lib/tools/BetaToolRunner.mjs
generated
vendored
Normal file
372
extracted-source/node_modules/@anthropic-ai/sdk/lib/tools/BetaToolRunner.mjs
generated
vendored
Normal file
@@ -0,0 +1,372 @@
|
||||
var _BetaToolRunner_instances, _BetaToolRunner_consumed, _BetaToolRunner_mutated, _BetaToolRunner_state, _BetaToolRunner_options, _BetaToolRunner_message, _BetaToolRunner_toolResponse, _BetaToolRunner_completion, _BetaToolRunner_iterationCount, _BetaToolRunner_checkAndCompact, _BetaToolRunner_generateToolResponse;
|
||||
import { __classPrivateFieldGet, __classPrivateFieldSet } from "../../internal/tslib.mjs";
|
||||
import { ToolError } from "./ToolError.mjs";
|
||||
import { AnthropicError } from "../../core/error.mjs";
|
||||
import { buildHeaders } from "../../internal/headers.mjs";
|
||||
import { DEFAULT_SUMMARY_PROMPT, DEFAULT_TOKEN_THRESHOLD } from "./CompactionControl.mjs";
|
||||
import { collectStainlessHelpers } from "../stainless-helper-header.mjs";
|
||||
/**
 * Just Promise.withResolvers(), which is not available in all environments.
 * Returns a promise plus its externally-callable resolve/reject functions.
 */
function promiseWithResolvers() {
    let resolveFn;
    let rejectFn;
    const promise = new Promise((res, rej) => {
        resolveFn = res;
        rejectFn = rej;
    });
    // The executor runs synchronously, so both functions are assigned here.
    return { promise, resolve: resolveFn, reject: rejectFn };
}
|
||||
/**
 * A ToolRunner handles the automatic conversation loop between the assistant and tools.
 *
 * A ToolRunner is an async iterable that yields either BetaMessage or BetaMessageStream objects
 * depending on the streaming configuration.
 */
export class BetaToolRunner {
    constructor(client, params, options) {
        _BetaToolRunner_instances.add(this);
        this.client = client;
        /** Whether the async iterator has been consumed */
        _BetaToolRunner_consumed.set(this, false);
        /** Whether parameters have been mutated since the last API call */
        _BetaToolRunner_mutated.set(this, false);
        /** Current state containing the request parameters */
        _BetaToolRunner_state.set(this, void 0);
        _BetaToolRunner_options.set(this, void 0);
        /** Promise for the last message received from the assistant */
        _BetaToolRunner_message.set(this, void 0);
        /** Cached tool response to avoid redundant executions */
        _BetaToolRunner_toolResponse.set(this, void 0);
        /** Promise resolvers for waiting on completion */
        _BetaToolRunner_completion.set(this, void 0);
        /** Number of iterations (API requests) made so far */
        _BetaToolRunner_iterationCount.set(this, 0);
        __classPrivateFieldSet(this, _BetaToolRunner_state, {
            params: {
                // You can't clone the entire params since there are functions as handlers.
                // You also don't really need to clone params.messages, but it probably will prevent a foot gun
                // somewhere.
                ...params,
                messages: structuredClone(params.messages),
            },
        }, "f");
        // Advertise the runner (plus any helper-created tools/messages) via the
        // x-stainless-helper header on every request this runner makes.
        const helpers = collectStainlessHelpers(params.tools, params.messages);
        const helperValue = ['BetaToolRunner', ...helpers].join(', ');
        __classPrivateFieldSet(this, _BetaToolRunner_options, {
            ...options,
            headers: buildHeaders([{ 'x-stainless-helper': helperValue }, options?.headers]),
        }, "f");
        __classPrivateFieldSet(this, _BetaToolRunner_completion, promiseWithResolvers(), "f");
    }
    // The computed member name below (a tslib down-leveling artifact) also
    // initializes the class's private fields and the #checkAndCompact private
    // method; the member itself is the `[Symbol.asyncIterator]()` generator.
    async *[(_BetaToolRunner_consumed = new WeakMap(), _BetaToolRunner_mutated = new WeakMap(), _BetaToolRunner_state = new WeakMap(), _BetaToolRunner_options = new WeakMap(), _BetaToolRunner_message = new WeakMap(), _BetaToolRunner_toolResponse = new WeakMap(), _BetaToolRunner_completion = new WeakMap(), _BetaToolRunner_iterationCount = new WeakMap(), _BetaToolRunner_instances = new WeakSet(), _BetaToolRunner_checkAndCompact = async function _BetaToolRunner_checkAndCompact() {
        // When compaction is enabled and the last exchange exceeded the token
        // threshold, replaces the conversation with a model-written summary.
        // Returns true if the history was compacted.
        const compactionControl = __classPrivateFieldGet(this, _BetaToolRunner_state, "f").params.compactionControl;
        if (!compactionControl || !compactionControl.enabled) {
            return false;
        }
        let tokensUsed = 0;
        if (__classPrivateFieldGet(this, _BetaToolRunner_message, "f") !== undefined) {
            try {
                const message = await __classPrivateFieldGet(this, _BetaToolRunner_message, "f");
                const totalInputTokens = message.usage.input_tokens +
                    (message.usage.cache_creation_input_tokens ?? 0) +
                    (message.usage.cache_read_input_tokens ?? 0);
                tokensUsed = totalInputTokens + message.usage.output_tokens;
            }
            catch {
                // If we can't get the message, skip compaction
                return false;
            }
        }
        const threshold = compactionControl.contextTokenThreshold ?? DEFAULT_TOKEN_THRESHOLD;
        if (tokensUsed < threshold) {
            return false;
        }
        const model = compactionControl.model ?? __classPrivateFieldGet(this, _BetaToolRunner_state, "f").params.model;
        const summaryPrompt = compactionControl.summaryPrompt ?? DEFAULT_SUMMARY_PROMPT;
        const messages = __classPrivateFieldGet(this, _BetaToolRunner_state, "f").params.messages;
        if (messages[messages.length - 1].role === 'assistant') {
            // Remove tool_use blocks from the last message to avoid 400 error
            // (tool_use requires tool_result, which we don't have yet)
            const lastMessage = messages[messages.length - 1];
            if (Array.isArray(lastMessage.content)) {
                const nonToolBlocks = lastMessage.content.filter((block) => block.type !== 'tool_use');
                if (nonToolBlocks.length === 0) {
                    // If all blocks were tool_use, just remove the message entirely
                    messages.pop();
                }
                else {
                    lastMessage.content = nonToolBlocks;
                }
            }
        }
        // Ask the model to summarize the conversation so far.
        const response = await this.client.beta.messages.create({
            model,
            messages: [
                ...messages,
                {
                    role: 'user',
                    content: [
                        {
                            type: 'text',
                            text: summaryPrompt,
                        },
                    ],
                },
            ],
            max_tokens: __classPrivateFieldGet(this, _BetaToolRunner_state, "f").params.max_tokens,
        }, {
            headers: { 'x-stainless-helper': 'compaction' },
        });
        if (response.content[0]?.type !== 'text') {
            throw new AnthropicError('Expected text response for compaction');
        }
        // Replace the entire history with the summary as a single user message.
        __classPrivateFieldGet(this, _BetaToolRunner_state, "f").params.messages = [
            {
                role: 'user',
                content: response.content,
            },
        ];
        return true;
    }, Symbol.asyncIterator)]() {
        var _a;
        if (__classPrivateFieldGet(this, _BetaToolRunner_consumed, "f")) {
            throw new AnthropicError('Cannot iterate over a consumed stream');
        }
        __classPrivateFieldSet(this, _BetaToolRunner_consumed, true, "f");
        __classPrivateFieldSet(this, _BetaToolRunner_mutated, true, "f");
        __classPrivateFieldSet(this, _BetaToolRunner_toolResponse, undefined, "f");
        try {
            while (true) {
                let stream;
                try {
                    // Stop once the caller's iteration budget is exhausted.
                    if (__classPrivateFieldGet(this, _BetaToolRunner_state, "f").params.max_iterations &&
                        __classPrivateFieldGet(this, _BetaToolRunner_iterationCount, "f") >= __classPrivateFieldGet(this, _BetaToolRunner_state, "f").params.max_iterations) {
                        break;
                    }
                    __classPrivateFieldSet(this, _BetaToolRunner_mutated, false, "f");
                    __classPrivateFieldSet(this, _BetaToolRunner_toolResponse, undefined, "f");
                    __classPrivateFieldSet(this, _BetaToolRunner_iterationCount, (_a = __classPrivateFieldGet(this, _BetaToolRunner_iterationCount, "f"), _a++, _a), "f");
                    __classPrivateFieldSet(this, _BetaToolRunner_message, undefined, "f");
                    // Strip runner-only params before hitting the API.
                    const { max_iterations, compactionControl, ...params } = __classPrivateFieldGet(this, _BetaToolRunner_state, "f").params;
                    if (params.stream) {
                        stream = this.client.beta.messages.stream({ ...params }, __classPrivateFieldGet(this, _BetaToolRunner_options, "f"));
                        __classPrivateFieldSet(this, _BetaToolRunner_message, stream.finalMessage(), "f");
                        // Make sure that this promise doesn't throw before we get the option to do something about it.
                        // Error will be caught when we call await this.#message ultimately
                        __classPrivateFieldGet(this, _BetaToolRunner_message, "f").catch(() => { });
                        yield stream;
                    }
                    else {
                        __classPrivateFieldSet(this, _BetaToolRunner_message, this.client.beta.messages.create({ ...params, stream: false }, __classPrivateFieldGet(this, _BetaToolRunner_options, "f")), "f");
                        yield __classPrivateFieldGet(this, _BetaToolRunner_message, "f");
                    }
                    const isCompacted = await __classPrivateFieldGet(this, _BetaToolRunner_instances, "m", _BetaToolRunner_checkAndCompact).call(this);
                    if (!isCompacted) {
                        // If the caller didn't replace params while we yielded,
                        // append the assistant's reply to the history ourselves.
                        if (!__classPrivateFieldGet(this, _BetaToolRunner_mutated, "f")) {
                            const { role, content } = await __classPrivateFieldGet(this, _BetaToolRunner_message, "f");
                            __classPrivateFieldGet(this, _BetaToolRunner_state, "f").params.messages.push({ role, content });
                        }
                        const toolMessage = await __classPrivateFieldGet(this, _BetaToolRunner_instances, "m", _BetaToolRunner_generateToolResponse).call(this, __classPrivateFieldGet(this, _BetaToolRunner_state, "f").params.messages.at(-1));
                        if (toolMessage) {
                            __classPrivateFieldGet(this, _BetaToolRunner_state, "f").params.messages.push(toolMessage);
                        }
                        else if (!__classPrivateFieldGet(this, _BetaToolRunner_mutated, "f")) {
                            // No tool calls and no caller mutation: conversation is done.
                            break;
                        }
                    }
                }
                finally {
                    if (stream) {
                        stream.abort();
                    }
                }
            }
            if (!__classPrivateFieldGet(this, _BetaToolRunner_message, "f")) {
                throw new AnthropicError('ToolRunner concluded without a message from the server');
            }
            __classPrivateFieldGet(this, _BetaToolRunner_completion, "f").resolve(await __classPrivateFieldGet(this, _BetaToolRunner_message, "f"));
        }
        catch (error) {
            // Allow re-iteration after a failure, and surface the error through
            // done() before swapping in a fresh completion promise.
            __classPrivateFieldSet(this, _BetaToolRunner_consumed, false, "f");
            // Silence unhandled promise errors
            __classPrivateFieldGet(this, _BetaToolRunner_completion, "f").promise.catch(() => { });
            __classPrivateFieldGet(this, _BetaToolRunner_completion, "f").reject(error);
            __classPrivateFieldSet(this, _BetaToolRunner_completion, promiseWithResolvers(), "f");
            throw error;
        }
    }
    // Replace the request params, either directly or via a mutator function
    // receiving the current params; marks state as mutated so the loop
    // re-reads them and drops any cached tool response.
    setMessagesParams(paramsOrMutator) {
        if (typeof paramsOrMutator === 'function') {
            __classPrivateFieldGet(this, _BetaToolRunner_state, "f").params = paramsOrMutator(__classPrivateFieldGet(this, _BetaToolRunner_state, "f").params);
        }
        else {
            __classPrivateFieldGet(this, _BetaToolRunner_state, "f").params = paramsOrMutator;
        }
        __classPrivateFieldSet(this, _BetaToolRunner_mutated, true, "f");
        // Invalidate cached tool response since parameters changed
        __classPrivateFieldSet(this, _BetaToolRunner_toolResponse, undefined, "f");
    }
    /**
     * Get the tool response for the last message from the assistant.
     * Avoids redundant tool executions by caching results.
     *
     * @returns A promise that resolves to a BetaMessageParam containing tool results, or null if no tools need to be executed
     *
     * @example
     * const toolResponse = await runner.generateToolResponse();
     * if (toolResponse) {
     *   console.log('Tool results:', toolResponse.content);
     * }
     */
    async generateToolResponse() {
        // Prefer the in-flight assistant message; fall back to the last
        // message already recorded in the params history.
        const message = (await __classPrivateFieldGet(this, _BetaToolRunner_message, "f")) ?? this.params.messages.at(-1);
        if (!message) {
            return null;
        }
        return __classPrivateFieldGet(this, _BetaToolRunner_instances, "m", _BetaToolRunner_generateToolResponse).call(this, message);
    }
    /**
     * Wait for the async iterator to complete. This works even if the async iterator hasn't yet started, and
     * will wait for an instance to start and go to completion.
     *
     * @returns A promise that resolves to the final BetaMessage when the iterator completes
     *
     * @example
     * // Start consuming the iterator
     * for await (const message of runner) {
     *   console.log('Message:', message.content);
     * }
     *
     * // Meanwhile, wait for completion from another part of the code
     * const finalMessage = await runner.done();
     * console.log('Final response:', finalMessage.content);
     */
    done() {
        return __classPrivateFieldGet(this, _BetaToolRunner_completion, "f").promise;
    }
    /**
     * Returns a promise indicating that the stream is done. Unlike .done(), this will eagerly read the stream:
     * * If the iterator has not been consumed, consume the entire iterator and return the final message from the
     *   assistant.
     * * If the iterator has been consumed, waits for it to complete and returns the final message.
     *
     * @returns A promise that resolves to the final BetaMessage from the conversation
     * @throws {AnthropicError} If no messages were processed during the conversation
     *
     * @example
     * const finalMessage = await runner.runUntilDone();
     * console.log('Final response:', finalMessage.content);
     */
    async runUntilDone() {
        // If not yet consumed, start consuming and wait for completion
        if (!__classPrivateFieldGet(this, _BetaToolRunner_consumed, "f")) {
            for await (const _ of this) {
                // Iterator naturally populates this.#message
            }
        }
        // If consumed but not completed, wait for completion
        return this.done();
    }
    /**
     * Get the current parameters being used by the ToolRunner.
     *
     * @returns A readonly view of the current ToolRunnerParams
     *
     * @example
     * const currentParams = runner.params;
     * console.log('Current model:', currentParams.model);
     * console.log('Message count:', currentParams.messages.length);
     */
    get params() {
        return __classPrivateFieldGet(this, _BetaToolRunner_state, "f").params;
    }
    /**
     * Add one or more messages to the conversation history.
     *
     * @param messages - One or more BetaMessageParam objects to add to the conversation
     *
     * @example
     * runner.pushMessages(
     *   { role: 'user', content: 'Also, what about the weather in NYC?' }
     * );
     *
     * @example
     * // Adding multiple messages
     * runner.pushMessages(
     *   { role: 'user', content: 'What about NYC?' },
     *   { role: 'user', content: 'And Boston?' }
     * );
     */
    pushMessages(...messages) {
        this.setMessagesParams((params) => ({
            ...params,
            messages: [...params.messages, ...messages],
        }));
    }
    /**
     * Makes the ToolRunner directly awaitable, equivalent to calling .runUntilDone()
     * This allows using `await runner` instead of `await runner.runUntilDone()`
     */
    then(onfulfilled, onrejected) {
        return this.runUntilDone().then(onfulfilled, onrejected);
    }
}
|
||||
_BetaToolRunner_generateToolResponse = async function _BetaToolRunner_generateToolResponse(lastMessage) {
|
||||
if (__classPrivateFieldGet(this, _BetaToolRunner_toolResponse, "f") !== undefined) {
|
||||
return __classPrivateFieldGet(this, _BetaToolRunner_toolResponse, "f");
|
||||
}
|
||||
__classPrivateFieldSet(this, _BetaToolRunner_toolResponse, generateToolResponse(__classPrivateFieldGet(this, _BetaToolRunner_state, "f").params, lastMessage), "f");
|
||||
return __classPrivateFieldGet(this, _BetaToolRunner_toolResponse, "f");
|
||||
};
|
||||
async function generateToolResponse(params, lastMessage = params.messages.at(-1)) {
|
||||
// Only process if the last message is from the assistant and has tool use blocks
|
||||
if (!lastMessage ||
|
||||
lastMessage.role !== 'assistant' ||
|
||||
!lastMessage.content ||
|
||||
typeof lastMessage.content === 'string') {
|
||||
return null;
|
||||
}
|
||||
const toolUseBlocks = lastMessage.content.filter((content) => content.type === 'tool_use');
|
||||
if (toolUseBlocks.length === 0) {
|
||||
return null;
|
||||
}
|
||||
const toolResults = await Promise.all(toolUseBlocks.map(async (toolUse) => {
|
||||
const tool = params.tools.find((t) => ('name' in t ? t.name : t.mcp_server_name) === toolUse.name);
|
||||
if (!tool || !('run' in tool)) {
|
||||
return {
|
||||
type: 'tool_result',
|
||||
tool_use_id: toolUse.id,
|
||||
content: `Error: Tool '${toolUse.name}' not found`,
|
||||
is_error: true,
|
||||
};
|
||||
}
|
||||
try {
|
||||
let input = toolUse.input;
|
||||
if ('parse' in tool && tool.parse) {
|
||||
input = tool.parse(input);
|
||||
}
|
||||
const result = await tool.run(input);
|
||||
return {
|
||||
type: 'tool_result',
|
||||
tool_use_id: toolUse.id,
|
||||
content: result,
|
||||
};
|
||||
}
|
||||
catch (error) {
|
||||
return {
|
||||
type: 'tool_result',
|
||||
tool_use_id: toolUse.id,
|
||||
content: error instanceof ToolError ?
|
||||
error.content
|
||||
: `Error: ${error instanceof Error ? error.message : String(error)}`,
|
||||
is_error: true,
|
||||
};
|
||||
}
|
||||
}));
|
||||
return {
|
||||
role: 'user',
|
||||
content: toolResults,
|
||||
};
|
||||
}
|
||||
//# sourceMappingURL=BetaToolRunner.mjs.map
|
||||
25
extracted-source/node_modules/@anthropic-ai/sdk/lib/tools/CompactionControl.mjs
generated
vendored
Normal file
25
extracted-source/node_modules/@anthropic-ai/sdk/lib/tools/CompactionControl.mjs
generated
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
export const DEFAULT_TOKEN_THRESHOLD = 100000;
|
||||
export const DEFAULT_SUMMARY_PROMPT = `You have been working on the task described above but have not yet completed it. Write a continuation summary that will allow you (or another instance of yourself) to resume work efficiently in a future context window where the conversation history will be replaced with this summary. Your summary should be structured, concise, and actionable. Include:
|
||||
1. Task Overview
|
||||
The user's core request and success criteria
|
||||
Any clarifications or constraints they specified
|
||||
2. Current State
|
||||
What has been completed so far
|
||||
Files created, modified, or analyzed (with paths if relevant)
|
||||
Key outputs or artifacts produced
|
||||
3. Important Discoveries
|
||||
Technical constraints or requirements uncovered
|
||||
Decisions made and their rationale
|
||||
Errors encountered and how they were resolved
|
||||
What approaches were tried that didn't work (and why)
|
||||
4. Next Steps
|
||||
Specific actions needed to complete the task
|
||||
Any blockers or open questions to resolve
|
||||
Priority order if multiple steps remain
|
||||
5. Context to Preserve
|
||||
User preferences or style requirements
|
||||
Domain-specific details that aren't obvious
|
||||
Any promises made to the user
|
||||
Be concise but complete—err on the side of including information that would prevent duplicate work or repeated mistakes. Write in a way that enables immediate resumption of the task.
|
||||
Wrap your summary in <summary></summary> tags.`;
|
||||
//# sourceMappingURL=CompactionControl.mjs.map
|
||||
38
extracted-source/node_modules/@anthropic-ai/sdk/lib/tools/ToolError.mjs
generated
vendored
Normal file
38
extracted-source/node_modules/@anthropic-ai/sdk/lib/tools/ToolError.mjs
generated
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
/**
|
||||
* An error that can be thrown from a tool's `run` method to return structured
|
||||
* content blocks as the error result, rather than just a string message.
|
||||
*
|
||||
* When the ToolRunner catches this error, it will use the `content` property
|
||||
* as the tool result with `is_error: true`.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const tool = {
|
||||
* name: 'my_tool',
|
||||
* run: async (input) => {
|
||||
* if (somethingWentWrong) {
|
||||
* throw new ToolError([
|
||||
* { type: 'text', text: 'Error details here' },
|
||||
* { type: 'image', source: { type: 'base64', data: '...', media_type: 'image/png' } },
|
||||
* ]);
|
||||
* }
|
||||
* return 'success';
|
||||
* },
|
||||
* };
|
||||
* ```
|
||||
*/
|
||||
export class ToolError extends Error {
|
||||
constructor(content) {
|
||||
const message = typeof content === 'string' ? content : (content
|
||||
.map((block) => {
|
||||
if (block.type === 'text')
|
||||
return block.text;
|
||||
return `[${block.type}]`;
|
||||
})
|
||||
.join(' '));
|
||||
super(message);
|
||||
this.name = 'ToolError';
|
||||
this.content = content;
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=ToolError.mjs.map
|
||||
24
extracted-source/node_modules/@anthropic-ai/sdk/resources/beta/beta.mjs
generated
vendored
Normal file
24
extracted-source/node_modules/@anthropic-ai/sdk/resources/beta/beta.mjs
generated
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { APIResource } from "../../core/resource.mjs";
|
||||
import * as FilesAPI from "./files.mjs";
|
||||
import { Files, } from "./files.mjs";
|
||||
import * as ModelsAPI from "./models.mjs";
|
||||
import { Models } from "./models.mjs";
|
||||
import * as MessagesAPI from "./messages/messages.mjs";
|
||||
import { Messages, } from "./messages/messages.mjs";
|
||||
import * as SkillsAPI from "./skills/skills.mjs";
|
||||
import { Skills, } from "./skills/skills.mjs";
|
||||
export class Beta extends APIResource {
|
||||
constructor() {
|
||||
super(...arguments);
|
||||
this.models = new ModelsAPI.Models(this._client);
|
||||
this.messages = new MessagesAPI.Messages(this._client);
|
||||
this.files = new FilesAPI.Files(this._client);
|
||||
this.skills = new SkillsAPI.Skills(this._client);
|
||||
}
|
||||
}
|
||||
Beta.Models = Models;
|
||||
Beta.Messages = Messages;
|
||||
Beta.Files = Files;
|
||||
Beta.Skills = Skills;
|
||||
//# sourceMappingURL=beta.mjs.map
|
||||
120
extracted-source/node_modules/@anthropic-ai/sdk/resources/beta/files.mjs
generated
vendored
Normal file
120
extracted-source/node_modules/@anthropic-ai/sdk/resources/beta/files.mjs
generated
vendored
Normal file
@@ -0,0 +1,120 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { APIResource } from "../../core/resource.mjs";
|
||||
import { Page } from "../../core/pagination.mjs";
|
||||
import { buildHeaders } from "../../internal/headers.mjs";
|
||||
import { stainlessHelperHeaderFromFile } from "../../lib/stainless-helper-header.mjs";
|
||||
import { multipartFormRequestOptions } from "../../internal/uploads.mjs";
|
||||
import { path } from "../../internal/utils/path.mjs";
|
||||
export class Files extends APIResource {
|
||||
/**
|
||||
* List Files
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* // Automatically fetches more pages as needed.
|
||||
* for await (const fileMetadata of client.beta.files.list()) {
|
||||
* // ...
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
list(params = {}, options) {
|
||||
const { betas, ...query } = params ?? {};
|
||||
return this._client.getAPIList('/v1/files', (Page), {
|
||||
query,
|
||||
...options,
|
||||
headers: buildHeaders([
|
||||
{ 'anthropic-beta': [...(betas ?? []), 'files-api-2025-04-14'].toString() },
|
||||
options?.headers,
|
||||
]),
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Delete File
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const deletedFile = await client.beta.files.delete(
|
||||
* 'file_id',
|
||||
* );
|
||||
* ```
|
||||
*/
|
||||
delete(fileID, params = {}, options) {
|
||||
const { betas } = params ?? {};
|
||||
return this._client.delete(path `/v1/files/${fileID}`, {
|
||||
...options,
|
||||
headers: buildHeaders([
|
||||
{ 'anthropic-beta': [...(betas ?? []), 'files-api-2025-04-14'].toString() },
|
||||
options?.headers,
|
||||
]),
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Download File
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const response = await client.beta.files.download(
|
||||
* 'file_id',
|
||||
* );
|
||||
*
|
||||
* const content = await response.blob();
|
||||
* console.log(content);
|
||||
* ```
|
||||
*/
|
||||
download(fileID, params = {}, options) {
|
||||
const { betas } = params ?? {};
|
||||
return this._client.get(path `/v1/files/${fileID}/content`, {
|
||||
...options,
|
||||
headers: buildHeaders([
|
||||
{
|
||||
'anthropic-beta': [...(betas ?? []), 'files-api-2025-04-14'].toString(),
|
||||
Accept: 'application/binary',
|
||||
},
|
||||
options?.headers,
|
||||
]),
|
||||
__binaryResponse: true,
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Get File Metadata
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const fileMetadata =
|
||||
* await client.beta.files.retrieveMetadata('file_id');
|
||||
* ```
|
||||
*/
|
||||
retrieveMetadata(fileID, params = {}, options) {
|
||||
const { betas } = params ?? {};
|
||||
return this._client.get(path `/v1/files/${fileID}`, {
|
||||
...options,
|
||||
headers: buildHeaders([
|
||||
{ 'anthropic-beta': [...(betas ?? []), 'files-api-2025-04-14'].toString() },
|
||||
options?.headers,
|
||||
]),
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Upload File
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const fileMetadata = await client.beta.files.upload({
|
||||
* file: fs.createReadStream('path/to/file'),
|
||||
* });
|
||||
* ```
|
||||
*/
|
||||
upload(params, options) {
|
||||
const { betas, ...body } = params;
|
||||
return this._client.post('/v1/files', multipartFormRequestOptions({
|
||||
body,
|
||||
...options,
|
||||
headers: buildHeaders([
|
||||
{ 'anthropic-beta': [...(betas ?? []), 'files-api-2025-04-14'].toString() },
|
||||
stainlessHelperHeaderFromFile(body.file),
|
||||
options?.headers,
|
||||
]),
|
||||
}, this._client));
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=files.mjs.map
|
||||
200
extracted-source/node_modules/@anthropic-ai/sdk/resources/beta/messages/batches.mjs
generated
vendored
Normal file
200
extracted-source/node_modules/@anthropic-ai/sdk/resources/beta/messages/batches.mjs
generated
vendored
Normal file
@@ -0,0 +1,200 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { APIResource } from "../../../core/resource.mjs";
|
||||
import { Page } from "../../../core/pagination.mjs";
|
||||
import { buildHeaders } from "../../../internal/headers.mjs";
|
||||
import { JSONLDecoder } from "../../../internal/decoders/jsonl.mjs";
|
||||
import { AnthropicError } from "../../../error.mjs";
|
||||
import { path } from "../../../internal/utils/path.mjs";
|
||||
export class Batches extends APIResource {
|
||||
/**
|
||||
* Send a batch of Message creation requests.
|
||||
*
|
||||
* The Message Batches API can be used to process multiple Messages API requests at
|
||||
* once. Once a Message Batch is created, it begins processing immediately. Batches
|
||||
* can take up to 24 hours to complete.
|
||||
*
|
||||
* Learn more about the Message Batches API in our
|
||||
* [user guide](https://docs.claude.com/en/docs/build-with-claude/batch-processing)
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const betaMessageBatch =
|
||||
* await client.beta.messages.batches.create({
|
||||
* requests: [
|
||||
* {
|
||||
* custom_id: 'my-custom-id-1',
|
||||
* params: {
|
||||
* max_tokens: 1024,
|
||||
* messages: [
|
||||
* { content: 'Hello, world', role: 'user' },
|
||||
* ],
|
||||
* model: 'claude-opus-4-6',
|
||||
* },
|
||||
* },
|
||||
* ],
|
||||
* });
|
||||
* ```
|
||||
*/
|
||||
create(params, options) {
|
||||
const { betas, ...body } = params;
|
||||
return this._client.post('/v1/messages/batches?beta=true', {
|
||||
body,
|
||||
...options,
|
||||
headers: buildHeaders([
|
||||
{ 'anthropic-beta': [...(betas ?? []), 'message-batches-2024-09-24'].toString() },
|
||||
options?.headers,
|
||||
]),
|
||||
});
|
||||
}
|
||||
/**
|
||||
* This endpoint is idempotent and can be used to poll for Message Batch
|
||||
* completion. To access the results of a Message Batch, make a request to the
|
||||
* `results_url` field in the response.
|
||||
*
|
||||
* Learn more about the Message Batches API in our
|
||||
* [user guide](https://docs.claude.com/en/docs/build-with-claude/batch-processing)
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const betaMessageBatch =
|
||||
* await client.beta.messages.batches.retrieve(
|
||||
* 'message_batch_id',
|
||||
* );
|
||||
* ```
|
||||
*/
|
||||
retrieve(messageBatchID, params = {}, options) {
|
||||
const { betas } = params ?? {};
|
||||
return this._client.get(path `/v1/messages/batches/${messageBatchID}?beta=true`, {
|
||||
...options,
|
||||
headers: buildHeaders([
|
||||
{ 'anthropic-beta': [...(betas ?? []), 'message-batches-2024-09-24'].toString() },
|
||||
options?.headers,
|
||||
]),
|
||||
});
|
||||
}
|
||||
/**
|
||||
* List all Message Batches within a Workspace. Most recently created batches are
|
||||
* returned first.
|
||||
*
|
||||
* Learn more about the Message Batches API in our
|
||||
* [user guide](https://docs.claude.com/en/docs/build-with-claude/batch-processing)
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* // Automatically fetches more pages as needed.
|
||||
* for await (const betaMessageBatch of client.beta.messages.batches.list()) {
|
||||
* // ...
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
list(params = {}, options) {
|
||||
const { betas, ...query } = params ?? {};
|
||||
return this._client.getAPIList('/v1/messages/batches?beta=true', (Page), {
|
||||
query,
|
||||
...options,
|
||||
headers: buildHeaders([
|
||||
{ 'anthropic-beta': [...(betas ?? []), 'message-batches-2024-09-24'].toString() },
|
||||
options?.headers,
|
||||
]),
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Delete a Message Batch.
|
||||
*
|
||||
* Message Batches can only be deleted once they've finished processing. If you'd
|
||||
* like to delete an in-progress batch, you must first cancel it.
|
||||
*
|
||||
* Learn more about the Message Batches API in our
|
||||
* [user guide](https://docs.claude.com/en/docs/build-with-claude/batch-processing)
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const betaDeletedMessageBatch =
|
||||
* await client.beta.messages.batches.delete(
|
||||
* 'message_batch_id',
|
||||
* );
|
||||
* ```
|
||||
*/
|
||||
delete(messageBatchID, params = {}, options) {
|
||||
const { betas } = params ?? {};
|
||||
return this._client.delete(path `/v1/messages/batches/${messageBatchID}?beta=true`, {
|
||||
...options,
|
||||
headers: buildHeaders([
|
||||
{ 'anthropic-beta': [...(betas ?? []), 'message-batches-2024-09-24'].toString() },
|
||||
options?.headers,
|
||||
]),
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Batches may be canceled any time before processing ends. Once cancellation is
|
||||
* initiated, the batch enters a `canceling` state, at which time the system may
|
||||
* complete any in-progress, non-interruptible requests before finalizing
|
||||
* cancellation.
|
||||
*
|
||||
* The number of canceled requests is specified in `request_counts`. To determine
|
||||
* which requests were canceled, check the individual results within the batch.
|
||||
* Note that cancellation may not result in any canceled requests if they were
|
||||
* non-interruptible.
|
||||
*
|
||||
* Learn more about the Message Batches API in our
|
||||
* [user guide](https://docs.claude.com/en/docs/build-with-claude/batch-processing)
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const betaMessageBatch =
|
||||
* await client.beta.messages.batches.cancel(
|
||||
* 'message_batch_id',
|
||||
* );
|
||||
* ```
|
||||
*/
|
||||
cancel(messageBatchID, params = {}, options) {
|
||||
const { betas } = params ?? {};
|
||||
return this._client.post(path `/v1/messages/batches/${messageBatchID}/cancel?beta=true`, {
|
||||
...options,
|
||||
headers: buildHeaders([
|
||||
{ 'anthropic-beta': [...(betas ?? []), 'message-batches-2024-09-24'].toString() },
|
||||
options?.headers,
|
||||
]),
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Streams the results of a Message Batch as a `.jsonl` file.
|
||||
*
|
||||
* Each line in the file is a JSON object containing the result of a single request
|
||||
* in the Message Batch. Results are not guaranteed to be in the same order as
|
||||
* requests. Use the `custom_id` field to match results to requests.
|
||||
*
|
||||
* Learn more about the Message Batches API in our
|
||||
* [user guide](https://docs.claude.com/en/docs/build-with-claude/batch-processing)
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const betaMessageBatchIndividualResponse =
|
||||
* await client.beta.messages.batches.results(
|
||||
* 'message_batch_id',
|
||||
* );
|
||||
* ```
|
||||
*/
|
||||
async results(messageBatchID, params = {}, options) {
|
||||
const batch = await this.retrieve(messageBatchID);
|
||||
if (!batch.results_url) {
|
||||
throw new AnthropicError(`No batch \`results_url\`; Has it finished processing? ${batch.processing_status} - ${batch.id}`);
|
||||
}
|
||||
const { betas } = params ?? {};
|
||||
return this._client
|
||||
.get(batch.results_url, {
|
||||
...options,
|
||||
headers: buildHeaders([
|
||||
{
|
||||
'anthropic-beta': [...(betas ?? []), 'message-batches-2024-09-24'].toString(),
|
||||
Accept: 'application/binary',
|
||||
},
|
||||
options?.headers,
|
||||
]),
|
||||
stream: true,
|
||||
__binaryResponse: true,
|
||||
})
|
||||
._thenUnwrap((_, props) => JSONLDecoder.fromResponse(props.response, props.controller));
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=batches.mjs.map
|
||||
156
extracted-source/node_modules/@anthropic-ai/sdk/resources/beta/messages/messages.mjs
generated
vendored
Normal file
156
extracted-source/node_modules/@anthropic-ai/sdk/resources/beta/messages/messages.mjs
generated
vendored
Normal file
@@ -0,0 +1,156 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { AnthropicError } from "../../../error.mjs";
|
||||
import { APIResource } from "../../../core/resource.mjs";
|
||||
import { MODEL_NONSTREAMING_TOKENS } from "../../../internal/constants.mjs";
|
||||
import { buildHeaders } from "../../../internal/headers.mjs";
|
||||
import { stainlessHelperHeader } from "../../../lib/stainless-helper-header.mjs";
|
||||
import { parseBetaMessage, } from "../../../lib/beta-parser.mjs";
|
||||
import { BetaMessageStream } from "../../../lib/BetaMessageStream.mjs";
|
||||
import { BetaToolRunner, } from "../../../lib/tools/BetaToolRunner.mjs";
|
||||
import { ToolError } from "../../../lib/tools/ToolError.mjs";
|
||||
import * as BatchesAPI from "./batches.mjs";
|
||||
import { Batches, } from "./batches.mjs";
|
||||
const DEPRECATED_MODELS = {
|
||||
'claude-1.3': 'November 6th, 2024',
|
||||
'claude-1.3-100k': 'November 6th, 2024',
|
||||
'claude-instant-1.1': 'November 6th, 2024',
|
||||
'claude-instant-1.1-100k': 'November 6th, 2024',
|
||||
'claude-instant-1.2': 'November 6th, 2024',
|
||||
'claude-3-sonnet-20240229': 'July 21st, 2025',
|
||||
'claude-3-opus-20240229': 'January 5th, 2026',
|
||||
'claude-2.1': 'July 21st, 2025',
|
||||
'claude-2.0': 'July 21st, 2025',
|
||||
'claude-3-7-sonnet-latest': 'February 19th, 2026',
|
||||
'claude-3-7-sonnet-20250219': 'February 19th, 2026',
|
||||
};
|
||||
const MODELS_TO_WARN_WITH_THINKING_ENABLED = ['claude-opus-4-6'];
|
||||
export class Messages extends APIResource {
|
||||
constructor() {
|
||||
super(...arguments);
|
||||
this.batches = new BatchesAPI.Batches(this._client);
|
||||
}
|
||||
create(params, options) {
|
||||
// Transform deprecated output_format to output_config.format
|
||||
const modifiedParams = transformOutputFormat(params);
|
||||
const { betas, ...body } = modifiedParams;
|
||||
if (body.model in DEPRECATED_MODELS) {
|
||||
console.warn(`The model '${body.model}' is deprecated and will reach end-of-life on ${DEPRECATED_MODELS[body.model]}\nPlease migrate to a newer model. Visit https://docs.anthropic.com/en/docs/resources/model-deprecations for more information.`);
|
||||
}
|
||||
if (body.model in MODELS_TO_WARN_WITH_THINKING_ENABLED &&
|
||||
body.thinking &&
|
||||
body.thinking.type === 'enabled') {
|
||||
console.warn(`Using Claude with ${body.model} and 'thinking.type=enabled' is deprecated. Use 'thinking.type=adaptive' instead which results in better model performance in our testing: https://platform.claude.com/docs/en/build-with-claude/adaptive-thinking`);
|
||||
}
|
||||
let timeout = this._client._options.timeout;
|
||||
if (!body.stream && timeout == null) {
|
||||
const maxNonstreamingTokens = MODEL_NONSTREAMING_TOKENS[body.model] ?? undefined;
|
||||
timeout = this._client.calculateNonstreamingTimeout(body.max_tokens, maxNonstreamingTokens);
|
||||
}
|
||||
// Collect helper info from tools and messages
|
||||
const helperHeader = stainlessHelperHeader(body.tools, body.messages);
|
||||
return this._client.post('/v1/messages?beta=true', {
|
||||
body,
|
||||
timeout: timeout ?? 600000,
|
||||
...options,
|
||||
headers: buildHeaders([
|
||||
{ ...(betas?.toString() != null ? { 'anthropic-beta': betas?.toString() } : undefined) },
|
||||
helperHeader,
|
||||
options?.headers,
|
||||
]),
|
||||
stream: modifiedParams.stream ?? false,
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Send a structured list of input messages with text and/or image content, along with an expected `output_format` and
|
||||
* the response will be automatically parsed and available in the `parsed_output` property of the message.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const message = await client.beta.messages.parse({
|
||||
* model: 'claude-3-5-sonnet-20241022',
|
||||
* max_tokens: 1024,
|
||||
* messages: [{ role: 'user', content: 'What is 2+2?' }],
|
||||
* output_format: zodOutputFormat(z.object({ answer: z.number() }), 'math'),
|
||||
* });
|
||||
*
|
||||
* console.log(message.parsed_output?.answer); // 4
|
||||
* ```
|
||||
*/
|
||||
parse(params, options) {
|
||||
options = {
|
||||
...options,
|
||||
headers: buildHeaders([
|
||||
{ 'anthropic-beta': [...(params.betas ?? []), 'structured-outputs-2025-12-15'].toString() },
|
||||
options?.headers,
|
||||
]),
|
||||
};
|
||||
return this.create(params, options).then((message) => parseBetaMessage(message, params, { logger: this._client.logger ?? console }));
|
||||
}
|
||||
/**
|
||||
* Create a Message stream
|
||||
*/
|
||||
stream(body, options) {
|
||||
return BetaMessageStream.createMessage(this, body, options);
|
||||
}
|
||||
/**
|
||||
* Count the number of tokens in a Message.
|
||||
*
|
||||
* The Token Count API can be used to count the number of tokens in a Message,
|
||||
* including tools, images, and documents, without creating it.
|
||||
*
|
||||
* Learn more about token counting in our
|
||||
* [user guide](https://docs.claude.com/en/docs/build-with-claude/token-counting)
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const betaMessageTokensCount =
|
||||
* await client.beta.messages.countTokens({
|
||||
* messages: [{ content: 'string', role: 'user' }],
|
||||
* model: 'claude-opus-4-6',
|
||||
* });
|
||||
* ```
|
||||
*/
|
||||
countTokens(params, options) {
|
||||
// Transform deprecated output_format to output_config.format
|
||||
const modifiedParams = transformOutputFormat(params);
|
||||
const { betas, ...body } = modifiedParams;
|
||||
return this._client.post('/v1/messages/count_tokens?beta=true', {
|
||||
body,
|
||||
...options,
|
||||
headers: buildHeaders([
|
||||
{ 'anthropic-beta': [...(betas ?? []), 'token-counting-2024-11-01'].toString() },
|
||||
options?.headers,
|
||||
]),
|
||||
});
|
||||
}
|
||||
toolRunner(body, options) {
|
||||
return new BetaToolRunner(this._client, body, options);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Transform deprecated output_format to output_config.format
|
||||
* Returns a modified copy of the params without mutating the original
|
||||
*/
|
||||
function transformOutputFormat(params) {
|
||||
if (!params.output_format) {
|
||||
return params;
|
||||
}
|
||||
if (params.output_config?.format) {
|
||||
throw new AnthropicError('Both output_format and output_config.format were provided. ' +
|
||||
'Please use only output_config.format (output_format is deprecated).');
|
||||
}
|
||||
const { output_format, ...rest } = params;
|
||||
return {
|
||||
...rest,
|
||||
output_config: {
|
||||
...params.output_config,
|
||||
format: output_format,
|
||||
},
|
||||
};
|
||||
}
|
||||
export { BetaToolRunner } from "../../../lib/tools/BetaToolRunner.mjs";
|
||||
export { ToolError } from "../../../lib/tools/ToolError.mjs";
|
||||
Messages.Batches = Batches;
|
||||
Messages.BetaToolRunner = BetaToolRunner;
|
||||
Messages.ToolError = ToolError;
|
||||
//# sourceMappingURL=messages.mjs.map
|
||||
56
extracted-source/node_modules/@anthropic-ai/sdk/resources/beta/models.mjs
generated
vendored
Normal file
56
extracted-source/node_modules/@anthropic-ai/sdk/resources/beta/models.mjs
generated
vendored
Normal file
@@ -0,0 +1,56 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { APIResource } from "../../core/resource.mjs";
|
||||
import { Page } from "../../core/pagination.mjs";
|
||||
import { buildHeaders } from "../../internal/headers.mjs";
|
||||
import { path } from "../../internal/utils/path.mjs";
|
||||
export class Models extends APIResource {
|
||||
/**
|
||||
* Get a specific model.
|
||||
*
|
||||
* The Models API response can be used to determine information about a specific
|
||||
* model or resolve a model alias to a model ID.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const betaModelInfo = await client.beta.models.retrieve(
|
||||
* 'model_id',
|
||||
* );
|
||||
* ```
|
||||
*/
|
||||
retrieve(modelID, params = {}, options) {
|
||||
const { betas } = params ?? {};
|
||||
return this._client.get(path `/v1/models/${modelID}?beta=true`, {
|
||||
...options,
|
||||
headers: buildHeaders([
|
||||
{ ...(betas?.toString() != null ? { 'anthropic-beta': betas?.toString() } : undefined) },
|
||||
options?.headers,
|
||||
]),
|
||||
});
|
||||
}
|
||||
/**
|
||||
* List available models.
|
||||
*
|
||||
* The Models API response can be used to determine which models are available for
|
||||
* use in the API. More recently released models are listed first.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* // Automatically fetches more pages as needed.
|
||||
* for await (const betaModelInfo of client.beta.models.list()) {
|
||||
* // ...
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
list(params = {}, options) {
|
||||
const { betas, ...query } = params ?? {};
|
||||
return this._client.getAPIList('/v1/models?beta=true', (Page), {
|
||||
query,
|
||||
...options,
|
||||
headers: buildHeaders([
|
||||
{ ...(betas?.toString() != null ? { 'anthropic-beta': betas?.toString() } : undefined) },
|
||||
options?.headers,
|
||||
]),
|
||||
});
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=models.mjs.map
|
||||
93
extracted-source/node_modules/@anthropic-ai/sdk/resources/beta/skills/skills.mjs
generated
vendored
Normal file
93
extracted-source/node_modules/@anthropic-ai/sdk/resources/beta/skills/skills.mjs
generated
vendored
Normal file
@@ -0,0 +1,93 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { APIResource } from "../../../core/resource.mjs";
|
||||
import * as VersionsAPI from "./versions.mjs";
|
||||
import { Versions, } from "./versions.mjs";
|
||||
import { PageCursor } from "../../../core/pagination.mjs";
|
||||
import { buildHeaders } from "../../../internal/headers.mjs";
|
||||
import { multipartFormRequestOptions } from "../../../internal/uploads.mjs";
|
||||
import { path } from "../../../internal/utils/path.mjs";
|
||||
export class Skills extends APIResource {
|
||||
constructor() {
|
||||
super(...arguments);
|
||||
this.versions = new VersionsAPI.Versions(this._client);
|
||||
}
|
||||
/**
|
||||
* Create Skill
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const skill = await client.beta.skills.create();
|
||||
* ```
|
||||
*/
|
||||
create(params = {}, options) {
|
||||
const { betas, ...body } = params ?? {};
|
||||
return this._client.post('/v1/skills?beta=true', multipartFormRequestOptions({
|
||||
body,
|
||||
...options,
|
||||
headers: buildHeaders([
|
||||
{ 'anthropic-beta': [...(betas ?? []), 'skills-2025-10-02'].toString() },
|
||||
options?.headers,
|
||||
]),
|
||||
}, this._client, false));
|
||||
}
|
||||
/**
|
||||
* Get Skill
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const skill = await client.beta.skills.retrieve('skill_id');
|
||||
* ```
|
||||
*/
|
||||
retrieve(skillID, params = {}, options) {
|
||||
const { betas } = params ?? {};
|
||||
return this._client.get(path `/v1/skills/${skillID}?beta=true`, {
|
||||
...options,
|
||||
headers: buildHeaders([
|
||||
{ 'anthropic-beta': [...(betas ?? []), 'skills-2025-10-02'].toString() },
|
||||
options?.headers,
|
||||
]),
|
||||
});
|
||||
}
|
||||
/**
|
||||
* List Skills
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* // Automatically fetches more pages as needed.
|
||||
* for await (const skillListResponse of client.beta.skills.list()) {
|
||||
* // ...
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
list(params = {}, options) {
|
||||
const { betas, ...query } = params ?? {};
|
||||
return this._client.getAPIList('/v1/skills?beta=true', (PageCursor), {
|
||||
query,
|
||||
...options,
|
||||
headers: buildHeaders([
|
||||
{ 'anthropic-beta': [...(betas ?? []), 'skills-2025-10-02'].toString() },
|
||||
options?.headers,
|
||||
]),
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Delete Skill
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const skill = await client.beta.skills.delete('skill_id');
|
||||
* ```
|
||||
*/
|
||||
delete(skillID, params = {}, options) {
|
||||
const { betas } = params ?? {};
|
||||
return this._client.delete(path `/v1/skills/${skillID}?beta=true`, {
|
||||
...options,
|
||||
headers: buildHeaders([
|
||||
{ 'anthropic-beta': [...(betas ?? []), 'skills-2025-10-02'].toString() },
|
||||
options?.headers,
|
||||
]),
|
||||
});
|
||||
}
|
||||
}
|
||||
Skills.Versions = Versions;
|
||||
//# sourceMappingURL=skills.mjs.map
|
||||
96
extracted-source/node_modules/@anthropic-ai/sdk/resources/beta/skills/versions.mjs
generated
vendored
Normal file
96
extracted-source/node_modules/@anthropic-ai/sdk/resources/beta/skills/versions.mjs
generated
vendored
Normal file
@@ -0,0 +1,96 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { APIResource } from "../../../core/resource.mjs";
|
||||
import { PageCursor } from "../../../core/pagination.mjs";
|
||||
import { buildHeaders } from "../../../internal/headers.mjs";
|
||||
import { multipartFormRequestOptions } from "../../../internal/uploads.mjs";
|
||||
import { path } from "../../../internal/utils/path.mjs";
|
||||
export class Versions extends APIResource {
|
||||
/**
|
||||
* Create Skill Version
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const version = await client.beta.skills.versions.create(
|
||||
* 'skill_id',
|
||||
* );
|
||||
* ```
|
||||
*/
|
||||
create(skillID, params = {}, options) {
|
||||
const { betas, ...body } = params ?? {};
|
||||
return this._client.post(path `/v1/skills/${skillID}/versions?beta=true`, multipartFormRequestOptions({
|
||||
body,
|
||||
...options,
|
||||
headers: buildHeaders([
|
||||
{ 'anthropic-beta': [...(betas ?? []), 'skills-2025-10-02'].toString() },
|
||||
options?.headers,
|
||||
]),
|
||||
}, this._client));
|
||||
}
|
||||
/**
|
||||
* Get Skill Version
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const version = await client.beta.skills.versions.retrieve(
|
||||
* 'version',
|
||||
* { skill_id: 'skill_id' },
|
||||
* );
|
||||
* ```
|
||||
*/
|
||||
retrieve(version, params, options) {
|
||||
const { skill_id, betas } = params;
|
||||
return this._client.get(path `/v1/skills/${skill_id}/versions/${version}?beta=true`, {
|
||||
...options,
|
||||
headers: buildHeaders([
|
||||
{ 'anthropic-beta': [...(betas ?? []), 'skills-2025-10-02'].toString() },
|
||||
options?.headers,
|
||||
]),
|
||||
});
|
||||
}
|
||||
/**
|
||||
* List Skill Versions
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* // Automatically fetches more pages as needed.
|
||||
* for await (const versionListResponse of client.beta.skills.versions.list(
|
||||
* 'skill_id',
|
||||
* )) {
|
||||
* // ...
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
list(skillID, params = {}, options) {
|
||||
const { betas, ...query } = params ?? {};
|
||||
return this._client.getAPIList(path `/v1/skills/${skillID}/versions?beta=true`, (PageCursor), {
|
||||
query,
|
||||
...options,
|
||||
headers: buildHeaders([
|
||||
{ 'anthropic-beta': [...(betas ?? []), 'skills-2025-10-02'].toString() },
|
||||
options?.headers,
|
||||
]),
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Delete Skill Version
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const version = await client.beta.skills.versions.delete(
|
||||
* 'version',
|
||||
* { skill_id: 'skill_id' },
|
||||
* );
|
||||
* ```
|
||||
*/
|
||||
delete(version, params, options) {
|
||||
const { skill_id, betas } = params;
|
||||
return this._client.delete(path `/v1/skills/${skill_id}/versions/${version}?beta=true`, {
|
||||
...options,
|
||||
headers: buildHeaders([
|
||||
{ 'anthropic-beta': [...(betas ?? []), 'skills-2025-10-02'].toString() },
|
||||
options?.headers,
|
||||
]),
|
||||
});
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=versions.mjs.map
|
||||
19
extracted-source/node_modules/@anthropic-ai/sdk/resources/completions.mjs
generated
vendored
Normal file
19
extracted-source/node_modules/@anthropic-ai/sdk/resources/completions.mjs
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { APIResource } from "../core/resource.mjs";
|
||||
import { buildHeaders } from "../internal/headers.mjs";
|
||||
export class Completions extends APIResource {
|
||||
create(params, options) {
|
||||
const { betas, ...body } = params;
|
||||
return this._client.post('/v1/complete', {
|
||||
body,
|
||||
timeout: this._client._options.timeout ?? 600000,
|
||||
...options,
|
||||
headers: buildHeaders([
|
||||
{ ...(betas?.toString() != null ? { 'anthropic-beta': betas?.toString() } : undefined) },
|
||||
options?.headers,
|
||||
]),
|
||||
stream: params.stream ?? false,
|
||||
});
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=completions.mjs.map
|
||||
7
extracted-source/node_modules/@anthropic-ai/sdk/resources/index.mjs
generated
vendored
Normal file
7
extracted-source/node_modules/@anthropic-ai/sdk/resources/index.mjs
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
export * from "./shared.mjs";
|
||||
export { Beta, } from "./beta/beta.mjs";
|
||||
export { Completions, } from "./completions.mjs";
|
||||
export { Messages, } from "./messages/messages.mjs";
|
||||
export { Models, } from "./models.mjs";
|
||||
//# sourceMappingURL=index.mjs.map
|
||||
149
extracted-source/node_modules/@anthropic-ai/sdk/resources/messages/batches.mjs
generated
vendored
Normal file
149
extracted-source/node_modules/@anthropic-ai/sdk/resources/messages/batches.mjs
generated
vendored
Normal file
@@ -0,0 +1,149 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { APIResource } from "../../core/resource.mjs";
|
||||
import { Page } from "../../core/pagination.mjs";
|
||||
import { buildHeaders } from "../../internal/headers.mjs";
|
||||
import { JSONLDecoder } from "../../internal/decoders/jsonl.mjs";
|
||||
import { AnthropicError } from "../../error.mjs";
|
||||
import { path } from "../../internal/utils/path.mjs";
|
||||
export class Batches extends APIResource {
|
||||
/**
|
||||
* Send a batch of Message creation requests.
|
||||
*
|
||||
* The Message Batches API can be used to process multiple Messages API requests at
|
||||
* once. Once a Message Batch is created, it begins processing immediately. Batches
|
||||
* can take up to 24 hours to complete.
|
||||
*
|
||||
* Learn more about the Message Batches API in our
|
||||
* [user guide](https://docs.claude.com/en/docs/build-with-claude/batch-processing)
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const messageBatch = await client.messages.batches.create({
|
||||
* requests: [
|
||||
* {
|
||||
* custom_id: 'my-custom-id-1',
|
||||
* params: {
|
||||
* max_tokens: 1024,
|
||||
* messages: [
|
||||
* { content: 'Hello, world', role: 'user' },
|
||||
* ],
|
||||
* model: 'claude-opus-4-6',
|
||||
* },
|
||||
* },
|
||||
* ],
|
||||
* });
|
||||
* ```
|
||||
*/
|
||||
create(body, options) {
|
||||
return this._client.post('/v1/messages/batches', { body, ...options });
|
||||
}
|
||||
/**
|
||||
* This endpoint is idempotent and can be used to poll for Message Batch
|
||||
* completion. To access the results of a Message Batch, make a request to the
|
||||
* `results_url` field in the response.
|
||||
*
|
||||
* Learn more about the Message Batches API in our
|
||||
* [user guide](https://docs.claude.com/en/docs/build-with-claude/batch-processing)
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const messageBatch = await client.messages.batches.retrieve(
|
||||
* 'message_batch_id',
|
||||
* );
|
||||
* ```
|
||||
*/
|
||||
retrieve(messageBatchID, options) {
|
||||
return this._client.get(path `/v1/messages/batches/${messageBatchID}`, options);
|
||||
}
|
||||
/**
|
||||
* List all Message Batches within a Workspace. Most recently created batches are
|
||||
* returned first.
|
||||
*
|
||||
* Learn more about the Message Batches API in our
|
||||
* [user guide](https://docs.claude.com/en/docs/build-with-claude/batch-processing)
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* // Automatically fetches more pages as needed.
|
||||
* for await (const messageBatch of client.messages.batches.list()) {
|
||||
* // ...
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
list(query = {}, options) {
|
||||
return this._client.getAPIList('/v1/messages/batches', (Page), { query, ...options });
|
||||
}
|
||||
/**
|
||||
* Delete a Message Batch.
|
||||
*
|
||||
* Message Batches can only be deleted once they've finished processing. If you'd
|
||||
* like to delete an in-progress batch, you must first cancel it.
|
||||
*
|
||||
* Learn more about the Message Batches API in our
|
||||
* [user guide](https://docs.claude.com/en/docs/build-with-claude/batch-processing)
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const deletedMessageBatch =
|
||||
* await client.messages.batches.delete('message_batch_id');
|
||||
* ```
|
||||
*/
|
||||
delete(messageBatchID, options) {
|
||||
return this._client.delete(path `/v1/messages/batches/${messageBatchID}`, options);
|
||||
}
|
||||
/**
|
||||
* Batches may be canceled any time before processing ends. Once cancellation is
|
||||
* initiated, the batch enters a `canceling` state, at which time the system may
|
||||
* complete any in-progress, non-interruptible requests before finalizing
|
||||
* cancellation.
|
||||
*
|
||||
* The number of canceled requests is specified in `request_counts`. To determine
|
||||
* which requests were canceled, check the individual results within the batch.
|
||||
* Note that cancellation may not result in any canceled requests if they were
|
||||
* non-interruptible.
|
||||
*
|
||||
* Learn more about the Message Batches API in our
|
||||
* [user guide](https://docs.claude.com/en/docs/build-with-claude/batch-processing)
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const messageBatch = await client.messages.batches.cancel(
|
||||
* 'message_batch_id',
|
||||
* );
|
||||
* ```
|
||||
*/
|
||||
cancel(messageBatchID, options) {
|
||||
return this._client.post(path `/v1/messages/batches/${messageBatchID}/cancel`, options);
|
||||
}
|
||||
/**
|
||||
* Streams the results of a Message Batch as a `.jsonl` file.
|
||||
*
|
||||
* Each line in the file is a JSON object containing the result of a single request
|
||||
* in the Message Batch. Results are not guaranteed to be in the same order as
|
||||
* requests. Use the `custom_id` field to match results to requests.
|
||||
*
|
||||
* Learn more about the Message Batches API in our
|
||||
* [user guide](https://docs.claude.com/en/docs/build-with-claude/batch-processing)
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const messageBatchIndividualResponse =
|
||||
* await client.messages.batches.results('message_batch_id');
|
||||
* ```
|
||||
*/
|
||||
async results(messageBatchID, options) {
|
||||
const batch = await this.retrieve(messageBatchID);
|
||||
if (!batch.results_url) {
|
||||
throw new AnthropicError(`No batch \`results_url\`; Has it finished processing? ${batch.processing_status} - ${batch.id}`);
|
||||
}
|
||||
return this._client
|
||||
.get(batch.results_url, {
|
||||
...options,
|
||||
headers: buildHeaders([{ Accept: 'application/binary' }, options?.headers]),
|
||||
stream: true,
|
||||
__binaryResponse: true,
|
||||
})
|
||||
._thenUnwrap((_, props) => JSONLDecoder.fromResponse(props.response, props.controller));
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=batches.mjs.map
|
||||
123
extracted-source/node_modules/@anthropic-ai/sdk/resources/messages/messages.mjs
generated
vendored
Normal file
123
extracted-source/node_modules/@anthropic-ai/sdk/resources/messages/messages.mjs
generated
vendored
Normal file
@@ -0,0 +1,123 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { APIResource } from "../../core/resource.mjs";
|
||||
import { buildHeaders } from "../../internal/headers.mjs";
|
||||
import { stainlessHelperHeader } from "../../lib/stainless-helper-header.mjs";
|
||||
import { MessageStream } from "../../lib/MessageStream.mjs";
|
||||
import { parseMessage, } from "../../lib/parser.mjs";
|
||||
import * as BatchesAPI from "./batches.mjs";
|
||||
import { Batches, } from "./batches.mjs";
|
||||
import { MODEL_NONSTREAMING_TOKENS } from "../../internal/constants.mjs";
|
||||
export class Messages extends APIResource {
|
||||
constructor() {
|
||||
super(...arguments);
|
||||
this.batches = new BatchesAPI.Batches(this._client);
|
||||
}
|
||||
create(body, options) {
|
||||
if (body.model in DEPRECATED_MODELS) {
|
||||
console.warn(`The model '${body.model}' is deprecated and will reach end-of-life on ${DEPRECATED_MODELS[body.model]}\nPlease migrate to a newer model. Visit https://docs.anthropic.com/en/docs/resources/model-deprecations for more information.`);
|
||||
}
|
||||
if (body.model in MODELS_TO_WARN_WITH_THINKING_ENABLED &&
|
||||
body.thinking &&
|
||||
body.thinking.type === 'enabled') {
|
||||
console.warn(`Using Claude with ${body.model} and 'thinking.type=enabled' is deprecated. Use 'thinking.type=adaptive' instead which results in better model performance in our testing: https://platform.claude.com/docs/en/build-with-claude/adaptive-thinking`);
|
||||
}
|
||||
let timeout = this._client._options.timeout;
|
||||
if (!body.stream && timeout == null) {
|
||||
const maxNonstreamingTokens = MODEL_NONSTREAMING_TOKENS[body.model] ?? undefined;
|
||||
timeout = this._client.calculateNonstreamingTimeout(body.max_tokens, maxNonstreamingTokens);
|
||||
}
|
||||
// Collect helper info from tools and messages
|
||||
const helperHeader = stainlessHelperHeader(body.tools, body.messages);
|
||||
return this._client.post('/v1/messages', {
|
||||
body,
|
||||
timeout: timeout ?? 600000,
|
||||
...options,
|
||||
headers: buildHeaders([helperHeader, options?.headers]),
|
||||
stream: body.stream ?? false,
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Send a structured list of input messages with text and/or image content, along with an expected `output_config.format` and
|
||||
* the response will be automatically parsed and available in the `parsed_output` property of the message.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const message = await client.messages.parse({
|
||||
* model: 'claude-sonnet-4-5-20250929',
|
||||
* max_tokens: 1024,
|
||||
* messages: [{ role: 'user', content: 'What is 2+2?' }],
|
||||
* output_config: {
|
||||
* format: zodOutputFormat(z.object({ answer: z.number() })),
|
||||
* },
|
||||
* });
|
||||
*
|
||||
* console.log(message.parsed_output?.answer); // 4
|
||||
* ```
|
||||
*/
|
||||
parse(params, options) {
|
||||
return this.create(params, options).then((message) => parseMessage(message, params, { logger: this._client.logger ?? console }));
|
||||
}
|
||||
/**
|
||||
* Create a Message stream.
|
||||
*
|
||||
* If `output_config.format` is provided with a parseable format (like `zodOutputFormat()`),
|
||||
* the final message will include a `parsed_output` property with the parsed content.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const stream = client.messages.stream({
|
||||
* model: 'claude-sonnet-4-5-20250929',
|
||||
* max_tokens: 1024,
|
||||
* messages: [{ role: 'user', content: 'What is 2+2?' }],
|
||||
* output_config: {
|
||||
* format: zodOutputFormat(z.object({ answer: z.number() })),
|
||||
* },
|
||||
* });
|
||||
*
|
||||
* const message = await stream.finalMessage();
|
||||
* console.log(message.parsed_output?.answer); // 4
|
||||
* ```
|
||||
*/
|
||||
stream(body, options) {
|
||||
return MessageStream.createMessage(this, body, options, { logger: this._client.logger ?? console });
|
||||
}
|
||||
/**
|
||||
* Count the number of tokens in a Message.
|
||||
*
|
||||
* The Token Count API can be used to count the number of tokens in a Message,
|
||||
* including tools, images, and documents, without creating it.
|
||||
*
|
||||
* Learn more about token counting in our
|
||||
* [user guide](https://docs.claude.com/en/docs/build-with-claude/token-counting)
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const messageTokensCount =
|
||||
* await client.messages.countTokens({
|
||||
* messages: [{ content: 'string', role: 'user' }],
|
||||
* model: 'claude-opus-4-6',
|
||||
* });
|
||||
* ```
|
||||
*/
|
||||
countTokens(body, options) {
|
||||
return this._client.post('/v1/messages/count_tokens', { body, ...options });
|
||||
}
|
||||
}
|
||||
const DEPRECATED_MODELS = {
|
||||
'claude-1.3': 'November 6th, 2024',
|
||||
'claude-1.3-100k': 'November 6th, 2024',
|
||||
'claude-instant-1.1': 'November 6th, 2024',
|
||||
'claude-instant-1.1-100k': 'November 6th, 2024',
|
||||
'claude-instant-1.2': 'November 6th, 2024',
|
||||
'claude-3-sonnet-20240229': 'July 21st, 2025',
|
||||
'claude-3-opus-20240229': 'January 5th, 2026',
|
||||
'claude-2.1': 'July 21st, 2025',
|
||||
'claude-2.0': 'July 21st, 2025',
|
||||
'claude-3-7-sonnet-latest': 'February 19th, 2026',
|
||||
'claude-3-7-sonnet-20250219': 'February 19th, 2026',
|
||||
'claude-3-5-haiku-latest': 'February 19th, 2026',
|
||||
'claude-3-5-haiku-20241022': 'February 19th, 2026',
|
||||
};
|
||||
const MODELS_TO_WARN_WITH_THINKING_ENABLED = ['claude-opus-4-6'];
|
||||
Messages.Batches = Batches;
|
||||
//# sourceMappingURL=messages.mjs.map
|
||||
41
extracted-source/node_modules/@anthropic-ai/sdk/resources/models.mjs
generated
vendored
Normal file
41
extracted-source/node_modules/@anthropic-ai/sdk/resources/models.mjs
generated
vendored
Normal file
@@ -0,0 +1,41 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { APIResource } from "../core/resource.mjs";
|
||||
import { Page } from "../core/pagination.mjs";
|
||||
import { buildHeaders } from "../internal/headers.mjs";
|
||||
import { path } from "../internal/utils/path.mjs";
|
||||
export class Models extends APIResource {
|
||||
/**
|
||||
* Get a specific model.
|
||||
*
|
||||
* The Models API response can be used to determine information about a specific
|
||||
* model or resolve a model alias to a model ID.
|
||||
*/
|
||||
retrieve(modelID, params = {}, options) {
|
||||
const { betas } = params ?? {};
|
||||
return this._client.get(path `/v1/models/${modelID}`, {
|
||||
...options,
|
||||
headers: buildHeaders([
|
||||
{ ...(betas?.toString() != null ? { 'anthropic-beta': betas?.toString() } : undefined) },
|
||||
options?.headers,
|
||||
]),
|
||||
});
|
||||
}
|
||||
/**
|
||||
* List available models.
|
||||
*
|
||||
* The Models API response can be used to determine which models are available for
|
||||
* use in the API. More recently released models are listed first.
|
||||
*/
|
||||
list(params = {}, options) {
|
||||
const { betas, ...query } = params ?? {};
|
||||
return this._client.getAPIList('/v1/models', (Page), {
|
||||
query,
|
||||
...options,
|
||||
headers: buildHeaders([
|
||||
{ ...(betas?.toString() != null ? { 'anthropic-beta': betas?.toString() } : undefined) },
|
||||
options?.headers,
|
||||
]),
|
||||
});
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=models.mjs.map
|
||||
2
extracted-source/node_modules/@anthropic-ai/sdk/streaming.mjs
generated
vendored
Normal file
2
extracted-source/node_modules/@anthropic-ai/sdk/streaming.mjs
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
export * from "./core/streaming.mjs";
|
||||
//# sourceMappingURL=streaming.mjs.map
|
||||
2
extracted-source/node_modules/@anthropic-ai/sdk/version.mjs
generated
vendored
Normal file
2
extracted-source/node_modules/@anthropic-ai/sdk/version.mjs
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
export const VERSION = '0.74.0'; // x-release-please-version
|
||||
//# sourceMappingURL=version.mjs.map
|
||||
114
extracted-source/node_modules/@anthropic-ai/vertex-sdk/client.mjs
generated
vendored
Normal file
114
extracted-source/node_modules/@anthropic-ai/vertex-sdk/client.mjs
generated
vendored
Normal file
@@ -0,0 +1,114 @@
|
||||
import { BaseAnthropic } from '@anthropic-ai/sdk/client';
|
||||
import * as Resources from '@anthropic-ai/sdk/resources/index';
|
||||
import { GoogleAuth } from 'google-auth-library';
|
||||
import { readEnv } from "./internal/utils/env.mjs";
|
||||
import { isObj } from "./internal/utils/values.mjs";
|
||||
import { buildHeaders } from "./internal/headers.mjs";
|
||||
export { BaseAnthropic } from '@anthropic-ai/sdk/client';
|
||||
const DEFAULT_VERSION = 'vertex-2023-10-16';
|
||||
const MODEL_ENDPOINTS = new Set(['/v1/messages', '/v1/messages?beta=true']);
|
||||
export class AnthropicVertex extends BaseAnthropic {
|
||||
/**
|
||||
* API Client for interfacing with the Anthropic Vertex API.
|
||||
*
|
||||
* @param {string | null} opts.accessToken
|
||||
* @param {string | null} opts.projectId
|
||||
* @param {GoogleAuth} opts.googleAuth - Override the default google auth config
|
||||
* @param {AuthClient} opts.authClient - Provide a pre-configured AuthClient instance (alternative to googleAuth)
|
||||
* @param {string | null} [opts.region=process.env['CLOUD_ML_REGION']] - The region to use for the API. Use 'global' for global endpoint. [More details here](https://cloud.google.com/vertex-ai/generative-ai/docs/learn/locations).
|
||||
* @param {string} [opts.baseURL=process.env['ANTHROPIC_VERTEX__BASE_URL'] ?? https://${region}-aiplatform.googleapis.com/v1] - Override the default base URL for the API.
|
||||
* @param {number} [opts.timeout=10 minutes] - The maximum amount of time (in milliseconds) the client will wait for a response before timing out.
|
||||
* @param {MergedRequestInit} [opts.fetchOptions] - Additional `RequestInit` options to be passed to `fetch` calls.
|
||||
* @param {Fetch} [opts.fetch] - Specify a custom `fetch` function implementation.
|
||||
* @param {number} [opts.maxRetries=2] - The maximum number of times the client will retry a request.
|
||||
* @param {HeadersLike} opts.defaultHeaders - Default headers to include with every request to the API.
|
||||
* @param {Record<string, string | undefined>} opts.defaultQuery - Default query parameters to include with every request to the API.
|
||||
* @param {boolean} [opts.dangerouslyAllowBrowser=false] - By default, client-side use of this library is not allowed, as it risks exposing your secret API credentials to attackers.
|
||||
*/
|
||||
constructor({ baseURL = readEnv('ANTHROPIC_VERTEX_BASE_URL'), region = readEnv('CLOUD_ML_REGION') ?? null, projectId = readEnv('ANTHROPIC_VERTEX_PROJECT_ID') ?? null, ...opts } = {}) {
|
||||
if (!region) {
|
||||
throw new Error('No region was given. The client should be instantiated with the `region` option or the `CLOUD_ML_REGION` environment variable should be set.');
|
||||
}
|
||||
super({
|
||||
baseURL: baseURL ||
|
||||
(region === 'global' ?
|
||||
'https://aiplatform.googleapis.com/v1'
|
||||
: `https://${region}-aiplatform.googleapis.com/v1`),
|
||||
...opts,
|
||||
});
|
||||
this.messages = makeMessagesResource(this);
|
||||
this.beta = makeBetaResource(this);
|
||||
this.region = region;
|
||||
this.projectId = projectId;
|
||||
this.accessToken = opts.accessToken ?? null;
|
||||
if (opts.authClient && opts.googleAuth) {
|
||||
throw new Error('You cannot provide both `authClient` and `googleAuth`. Please provide only one of them.');
|
||||
}
|
||||
else if (opts.authClient) {
|
||||
this._authClientPromise = Promise.resolve(opts.authClient);
|
||||
}
|
||||
else {
|
||||
this._auth =
|
||||
opts.googleAuth ?? new GoogleAuth({ scopes: 'https://www.googleapis.com/auth/cloud-platform' });
|
||||
this._authClientPromise = this._auth.getClient();
|
||||
}
|
||||
}
|
||||
validateHeaders() {
|
||||
// auth validation is handled in prepareOptions since it needs to be async
|
||||
}
|
||||
async prepareOptions(options) {
|
||||
const authClient = await this._authClientPromise;
|
||||
const authHeaders = await authClient.getRequestHeaders();
|
||||
const projectId = authClient.projectId ?? authHeaders['x-goog-user-project'];
|
||||
if (!this.projectId && projectId) {
|
||||
this.projectId = projectId;
|
||||
}
|
||||
options.headers = buildHeaders([authHeaders, options.headers]);
|
||||
}
|
||||
async buildRequest(options) {
|
||||
if (isObj(options.body)) {
|
||||
// create a shallow copy of the request body so that code that mutates it later
|
||||
// doesn't mutate the original user-provided object
|
||||
options.body = { ...options.body };
|
||||
}
|
||||
if (isObj(options.body)) {
|
||||
if (!options.body['anthropic_version']) {
|
||||
options.body['anthropic_version'] = DEFAULT_VERSION;
|
||||
}
|
||||
}
|
||||
if (MODEL_ENDPOINTS.has(options.path) && options.method === 'post') {
|
||||
if (!this.projectId) {
|
||||
throw new Error('No projectId was given and it could not be resolved from credentials. The client should be instantiated with the `projectId` option or the `ANTHROPIC_VERTEX_PROJECT_ID` environment variable should be set.');
|
||||
}
|
||||
if (!isObj(options.body)) {
|
||||
throw new Error('Expected request body to be an object for post /v1/messages');
|
||||
}
|
||||
const model = options.body['model'];
|
||||
options.body['model'] = undefined;
|
||||
const stream = options.body['stream'] ?? false;
|
||||
const specifier = stream ? 'streamRawPredict' : 'rawPredict';
|
||||
options.path = `/projects/${this.projectId}/locations/${this.region}/publishers/anthropic/models/${model}:${specifier}`;
|
||||
}
|
||||
if (options.path === '/v1/messages/count_tokens' ||
|
||||
(options.path == '/v1/messages/count_tokens?beta=true' && options.method === 'post')) {
|
||||
if (!this.projectId) {
|
||||
throw new Error('No projectId was given and it could not be resolved from credentials. The client should be instantiated with the `projectId` option or the `ANTHROPIC_VERTEX_PROJECT_ID` environment variable should be set.');
|
||||
}
|
||||
options.path = `/projects/${this.projectId}/locations/${this.region}/publishers/anthropic/models/count-tokens:rawPredict`;
|
||||
}
|
||||
return super.buildRequest(options);
|
||||
}
|
||||
}
|
||||
function makeMessagesResource(client) {
|
||||
const resource = new Resources.Messages(client);
|
||||
// @ts-expect-error we're deleting non-optional properties
|
||||
delete resource.batches;
|
||||
return resource;
|
||||
}
|
||||
function makeBetaResource(client) {
|
||||
const resource = new Resources.Beta(client);
|
||||
// @ts-expect-error we're deleting non-optional properties
|
||||
delete resource.messages.batches;
|
||||
return resource;
|
||||
}
|
||||
//# sourceMappingURL=client.mjs.map
|
||||
2
extracted-source/node_modules/@anthropic-ai/vertex-sdk/core/error.mjs
generated
vendored
Normal file
2
extracted-source/node_modules/@anthropic-ai/vertex-sdk/core/error.mjs
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
export * from '@anthropic-ai/sdk/core/error';
|
||||
//# sourceMappingURL=error.mjs.map
|
||||
3
extracted-source/node_modules/@anthropic-ai/vertex-sdk/index.mjs
generated
vendored
Normal file
3
extracted-source/node_modules/@anthropic-ai/vertex-sdk/index.mjs
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
export * from "./client.mjs";
|
||||
export { AnthropicVertex as default } from "./client.mjs";
|
||||
//# sourceMappingURL=index.mjs.map
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user