mirror of
https://github.com/tvytlx/ai-agent-deep-dive.git
synced 2026-04-05 08:34:47 +08:00
Add extracted source directory and README navigation
This commit is contained in:
163
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/AWS_restJson1.mjs
generated
vendored
Normal file
163
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/AWS_restJson1.mjs
generated
vendored
Normal file
@@ -0,0 +1,163 @@
|
||||
// Copied from https://github.com/aws/aws-sdk-js-v3/blob/bee66fbd2a519a16b57c787b2689af857af720af/clients/client-bedrock-runtime/src/protocols/Aws_restJson1.ts
|
||||
// Modified to remove unnecessary code (we only need to call `de_ResponseStream`) and to adjust imports.
|
||||
import { collectBody, decorateServiceException as __decorateServiceException, expectInt32 as __expectInt32, expectString as __expectString, map, take, } from '@smithy/smithy-client';
|
||||
import { InternalServerException, ModelStreamErrorException, ThrottlingException, ValidationException, } from '@aws-sdk/client-bedrock-runtime';
|
||||
/**
 * deserializeAws_restJson1InternalServerExceptionRes
 */
const de_InternalServerExceptionRes = async (parsedOutput, context) => {
    const body = parsedOutput.body;
    const fields = map({});
    Object.assign(fields, take(body, {
        message: __expectString,
    }));
    return __decorateServiceException(new InternalServerException({
        $metadata: deserializeMetadata(parsedOutput),
        ...fields,
    }), body);
};
/**
 * deserializeAws_restJson1ModelStreamErrorExceptionRes
 */
const de_ModelStreamErrorExceptionRes = async (parsedOutput, context) => {
    const body = parsedOutput.body;
    const fields = map({});
    Object.assign(fields, take(body, {
        message: __expectString,
        originalMessage: __expectString,
        originalStatusCode: __expectInt32,
    }));
    return __decorateServiceException(new ModelStreamErrorException({
        $metadata: deserializeMetadata(parsedOutput),
        ...fields,
    }), body);
};
/**
 * deserializeAws_restJson1ThrottlingExceptionRes
 */
const de_ThrottlingExceptionRes = async (parsedOutput, context) => {
    const body = parsedOutput.body;
    const fields = map({});
    Object.assign(fields, take(body, {
        message: __expectString,
    }));
    return __decorateServiceException(new ThrottlingException({
        $metadata: deserializeMetadata(parsedOutput),
        ...fields,
    }), body);
};
/**
 * deserializeAws_restJson1ValidationExceptionRes
 */
const de_ValidationExceptionRes = async (parsedOutput, context) => {
    const body = parsedOutput.body;
    const fields = map({});
    Object.assign(fields, take(body, {
        message: __expectString,
    }));
    return __decorateServiceException(new ValidationException({
        $metadata: deserializeMetadata(parsedOutput),
        ...fields,
    }), body);
};
|
||||
/**
 * deserializeAws_restJson1ResponseStream
 */
export const de_ResponseStream = (output, context) => {
    // One deserializer per possible event-union member, checked in this order.
    const eventHandlers = {
        chunk: de_PayloadPart_event,
        internalServerException: de_InternalServerException_event,
        modelStreamErrorException: de_ModelStreamErrorException_event,
        validationException: de_ValidationException_event,
        throttlingException: de_ThrottlingException_event,
    };
    return context.eventStreamMarshaller.deserialize(output, async (event) => {
        for (const [name, handler] of Object.entries(eventHandlers)) {
            if (event[name] != null) {
                return { [name]: await handler(event[name], context) };
            }
        }
        return { $unknown: output };
    });
};
|
||||
// Each `*_event` helper re-parses the event's raw body, then delegates to the
// matching REST-JSON deserializer defined earlier in this module.
const de_InternalServerException_event = async (output, context) => {
    const body = await parseBody(output.body, context);
    return de_InternalServerExceptionRes({ ...output, body }, context);
};
const de_ModelStreamErrorException_event = async (output, context) => {
    const body = await parseBody(output.body, context);
    return de_ModelStreamErrorExceptionRes({ ...output, body }, context);
};
const de_PayloadPart_event = async (output, context) => {
    // Decode the payload body, then map it through the PayloadPart deserializer.
    const data = await parseBody(output.body, context);
    return { ...de_PayloadPart(data, context) };
};
const de_ThrottlingException_event = async (output, context) => {
    const body = await parseBody(output.body, context);
    return de_ThrottlingExceptionRes({ ...output, body }, context);
};
const de_ValidationException_event = async (output, context) => {
    const body = await parseBody(output.body, context);
    return de_ValidationExceptionRes({ ...output, body }, context);
};
|
||||
/**
 * deserializeAws_restJson1PayloadPart
 */
const de_PayloadPart = (output, context) => {
    // `bytes` arrives base64-encoded; decode it with the context's decoder.
    return take(output, { bytes: context.base64Decoder });
};
|
||||
// Extract standard AWS response metadata (status code + request-id headers,
// each falling back to '' when the header is absent).
const deserializeMetadata = (output) => {
    const headers = output.headers;
    return {
        httpStatusCode: output.statusCode,
        requestId: headers['x-amzn-requestid'] ?? headers['x-amzn-request-id'] ?? headers['x-amz-request-id'] ?? '',
        extendedRequestId: headers['x-amz-id-2'] ?? '',
        cfId: headers['x-amz-cf-id'] ?? '',
    };
};
|
||||
// Encode Uint8Array data into string with utf-8.
const collectBodyString = (streamBody, context) => collectBody(streamBody, context).then((body) => context.utf8Encoder(body));
// Collect a streamed JSON body; an empty body deserializes to `{}`.
const parseBody = async (streamBody, context) => {
    const encoded = await collectBodyString(streamBody, context);
    return encoded.length ? JSON.parse(encoded) : {};
};
|
||||
//# sourceMappingURL=AWS_restJson1.mjs.map
|
||||
124
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/client.mjs
generated
vendored
Normal file
124
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/client.mjs
generated
vendored
Normal file
@@ -0,0 +1,124 @@
|
||||
import { BaseAnthropic } from '@anthropic-ai/sdk/client';
|
||||
import * as Resources from '@anthropic-ai/sdk/resources/index';
|
||||
import { getAuthHeaders } from "./core/auth.mjs";
|
||||
import { Stream } from "./core/streaming.mjs";
|
||||
import { readEnv } from "./internal/utils/env.mjs";
|
||||
import { isObj } from "./internal/utils/values.mjs";
|
||||
import { buildHeaders } from "./internal/headers.mjs";
|
||||
import { path } from "./internal/utils/path.mjs";
|
||||
export { BaseAnthropic } from '@anthropic-ai/sdk/client';
|
||||
// Anthropic API version sent to Bedrock when the request body omits one.
const DEFAULT_VERSION = 'bedrock-2023-05-31';
// SDK paths that get rewritten into Bedrock model-invocation endpoints.
const MODEL_ENDPOINTS = new Set(['/v1/complete', '/v1/messages', '/v1/messages?beta=true']);
|
||||
/** API Client for interfacing with the Anthropic Bedrock API. */
export class AnthropicBedrock extends BaseAnthropic {
    /**
     * API Client for interfacing with the Anthropic Bedrock API.
     *
     * @param {string | null | undefined} [opts.awsSecretKey]
     * @param {string | null | undefined} [opts.awsAccessKey]
     * @param {string | undefined} [opts.awsRegion=process.env['AWS_REGION'] ?? us-east-1]
     * @param {string | null | undefined} [opts.awsSessionToken]
     * @param {(() => Promise<AwsCredentialIdentityProvider>) | null} [opts.providerChainResolver] - Custom provider chain resolver for AWS credentials. Useful for non-Node environments.
     * @param {string} [opts.baseURL=process.env['ANTHROPIC_BEDROCK_BASE_URL'] ?? https://bedrock-runtime.${this.awsRegion}.amazonaws.com] - Override the default base URL for the API.
     * @param {number} [opts.timeout=10 minutes] - The maximum amount of time (in milliseconds) the client will wait for a response before timing out.
     * @param {MergedRequestInit} [opts.fetchOptions] - Additional `RequestInit` options to be passed to `fetch` calls.
     * @param {Fetch} [opts.fetch] - Specify a custom `fetch` function implementation.
     * @param {number} [opts.maxRetries=2] - The maximum number of times the client will retry a request.
     * @param {HeadersLike} opts.defaultHeaders - Default headers to include with every request to the API.
     * @param {Record<string, string | undefined>} opts.defaultQuery - Default query parameters to include with every request to the API.
     * @param {boolean} [opts.dangerouslyAllowBrowser=false] - By default, client-side use of this library is not allowed, as it risks exposing your secret API credentials to attackers.
     * @param {boolean} [opts.skipAuth=false] - Skip authentication for this request. This is useful if you have an internal proxy that handles authentication for you.
     */
    constructor({ awsRegion = readEnv('AWS_REGION') ?? 'us-east-1', baseURL = readEnv('ANTHROPIC_BEDROCK_BASE_URL') ?? `https://bedrock-runtime.${awsRegion}.amazonaws.com`, awsSecretKey = null, awsAccessKey = null, awsSessionToken = null, providerChainResolver = null, ...opts } = {}) {
        super({
            baseURL,
            ...opts,
        });
        // NOTE(review): redundant — unconditionally overwritten by
        // `opts.skipAuth ?? false` a few lines below; kept as-is.
        this.skipAuth = false;
        // The resource factories below strip sub-resources that this backend
        // does not support (see makeMessagesResource / makeBetaResource).
        this.messages = makeMessagesResource(this);
        this.completions = new Resources.Completions(this);
        this.beta = makeBetaResource(this);
        this.awsSecretKey = awsSecretKey;
        this.awsAccessKey = awsAccessKey;
        this.awsRegion = awsRegion;
        this.awsSessionToken = awsSessionToken;
        this.skipAuth = opts.skipAuth ?? false;
        this.providerChainResolver = providerChainResolver;
    }
    validateHeaders() {
        // auth validation is handled in prepareRequest since it needs to be async
    }
    // Signs the outgoing request with AWS auth headers unless `skipAuth` is set.
    async prepareRequest(request, { url, options }) {
        if (this.skipAuth) {
            return;
        }
        const regionName = this.awsRegion;
        if (!regionName) {
            throw new Error('Expected `awsRegion` option to be passed to the client or the `AWS_REGION` environment variable to be present');
        }
        const headers = await getAuthHeaders(request, {
            url,
            regionName,
            awsAccessKey: this.awsAccessKey,
            awsSecretKey: this.awsSecretKey,
            awsSessionToken: this.awsSessionToken,
            fetchOptions: this.fetchOptions,
            providerChainResolver: this.providerChainResolver,
        });
        // Merge the signed headers over any caller-supplied ones.
        request.headers = buildHeaders([headers, request.headers]).values;
    }
    // Rewrites Anthropic-style requests into Bedrock model-invocation requests:
    // injects `anthropic_version` / `anthropic_beta` into the body and moves
    // `model` + `stream` out of the body into the URL path.
    async buildRequest(options) {
        options.__streamClass = Stream;
        if (isObj(options.body)) {
            // create a shallow copy of the request body so that code that mutates it later
            // doesn't mutate the original user-provided object
            options.body = { ...options.body };
        }
        if (isObj(options.body)) {
            if (!options.body['anthropic_version']) {
                options.body['anthropic_version'] = DEFAULT_VERSION;
            }
            if (options.headers && !options.body['anthropic_beta']) {
                const betas = buildHeaders([options.headers]).values.get('anthropic-beta');
                if (betas != null) {
                    options.body['anthropic_beta'] = betas.split(',');
                }
            }
        }
        if (MODEL_ENDPOINTS.has(options.path) && options.method === 'post') {
            if (!isObj(options.body)) {
                throw new Error('Expected request body to be an object for post /v1/messages');
            }
            // Bedrock encodes model and streaming mode in the URL, not the body.
            const model = options.body['model'];
            options.body['model'] = undefined;
            const stream = options.body['stream'];
            options.body['stream'] = undefined;
            if (stream) {
                options.path = path `/model/${model}/invoke-with-response-stream`;
            }
            else {
                options.path = path `/model/${model}/invoke`;
            }
        }
        return super.buildRequest(options);
    }
}
|
||||
// Build a Messages resource with the sub-resources Bedrock does not support
// removed from the instance.
function makeMessagesResource(client) {
    const messages = new Resources.Messages(client);
    // @ts-expect-error we're deleting non-optional properties
    delete messages.batches;
    // @ts-expect-error we're deleting non-optional properties
    delete messages.countTokens;
    return messages;
}
|
||||
// Build a Beta resource with the sub-resources Bedrock does not support
// removed from the instance.
function makeBetaResource(client) {
    const beta = new Resources.Beta(client);
    // @ts-expect-error we're deleting non-optional properties
    delete beta.promptCaching;
    // @ts-expect-error we're deleting non-optional properties
    delete beta.messages.batches;
    // @ts-expect-error we're deleting non-optional properties
    delete beta.messages.countTokens;
    return beta;
}
|
||||
//# sourceMappingURL=client.mjs.map
|
||||
82
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/core/auth.mjs
generated
vendored
Normal file
82
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/core/auth.mjs
generated
vendored
Normal file
@@ -0,0 +1,82 @@
|
||||
import { Sha256 } from '@aws-crypto/sha256-js';
|
||||
import { FetchHttpHandler } from '@smithy/fetch-http-handler';
|
||||
import { HttpRequest } from '@smithy/protocol-http';
|
||||
import { SignatureV4 } from '@smithy/signature-v4';
|
||||
import assert from 'assert';
|
||||
// Lazily imports the AWS default credential provider chain; an import failure
// is surfaced with guidance to supply a custom `providerChainResolver` instead.
const DEFAULT_PROVIDER_CHAIN_RESOLVER = () => import('@aws-sdk/credential-providers')
    .then(({ fromNodeProviderChain }) => fromNodeProviderChain({
        clientConfig: {
            requestHandler: new FetchHttpHandler({
                requestInit: (httpRequest) => ({ ...httpRequest }),
            }),
        },
    }))
    .catch((error) => {
        throw new Error(`Failed to import '@aws-sdk/credential-providers'.` +
            `You can provide a custom \`providerChainResolver\` in the client options if your runtime does not have access to '@aws-sdk/credential-providers': ` +
            `\`new AnthropicBedrock({ providerChainResolver })\` ` +
            `Original error: ${error.message}`);
    });
|
||||
// Resolves AWS credentials (via the given or default provider chain) and
// returns SigV4-signed headers for `req` against the Bedrock service.
export const getAuthHeaders = async (req, props) => {
    assert(req.method, 'Expected request method property to be set');
    const providerChain = await (props.providerChainResolver ?
        props.providerChainResolver()
        : DEFAULT_PROVIDER_CHAIN_RESOLVER());
    const credentials = await withTempEnv(() => {
        // Temporarily set the appropriate environment variables if we've been
        // explicitly given credentials so that the credentials provider can
        // resolve them.
        //
        // Note: the environment provider is only not run first if the `AWS_PROFILE`
        // environment variable is set.
        // https://github.com/aws/aws-sdk-js-v3/blob/44a18a34b2c93feccdfcd162928d13e6dbdcaf30/packages/credential-provider-node/src/defaultProvider.ts#L49
        if (props.awsAccessKey) {
            process.env['AWS_ACCESS_KEY_ID'] = props.awsAccessKey;
        }
        if (props.awsSecretKey) {
            process.env['AWS_SECRET_ACCESS_KEY'] = props.awsSecretKey;
        }
        if (props.awsSessionToken) {
            process.env['AWS_SESSION_TOKEN'] = props.awsSessionToken;
        }
    }, () => providerChain());
    const signer = new SignatureV4({
        service: 'bedrock',
        region: props.regionName,
        credentials,
        sha256: Sha256,
    });
    const url = new URL(props.url);
    // Normalize req.headers — which may be a Headers instance, an iterable of
    // entries, or a plain object — into a plain object for signing.
    const headers = !req.headers ? {}
        : Symbol.iterator in req.headers ?
            Object.fromEntries(Array.from(req.headers).map((header) => [...header]))
            : { ...req.headers };
    // The connection header may be stripped by a proxy somewhere, so the receiver
    // of this message may not see this header, so we remove it from the set of headers
    // that are signed.
    delete headers['connection'];
    headers['host'] = url.hostname;
    const request = new HttpRequest({
        method: req.method.toUpperCase(),
        protocol: url.protocol,
        path: url.pathname,
        headers,
        body: req.body,
    });
    const signed = await signer.sign(request);
    return signed.headers;
};
|
||||
// Runs `run` after applying `applyEnv`'s process.env mutations, restoring the
// previous environment afterwards — even if either callback throws.
const withTempEnv = async (applyEnv, run) => {
    const savedEnv = { ...process.env };
    try {
        applyEnv();
        return await run();
    } finally {
        process.env = savedEnv;
    }
};
|
||||
//# sourceMappingURL=auth.mjs.map
|
||||
2
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/core/error.mjs
generated
vendored
Normal file
2
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/core/error.mjs
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
export * from '@anthropic-ai/sdk/core/error';
|
||||
//# sourceMappingURL=error.mjs.map
|
||||
108
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/core/streaming.mjs
generated
vendored
Normal file
108
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/core/streaming.mjs
generated
vendored
Normal file
@@ -0,0 +1,108 @@
|
||||
import { EventStreamMarshaller } from '@smithy/eventstream-serde-node';
|
||||
import { fromBase64, toBase64 } from '@smithy/util-base64';
|
||||
import { streamCollector } from '@smithy/fetch-http-handler';
|
||||
import { Stream as CoreStream } from '@anthropic-ai/sdk/streaming';
|
||||
import { AnthropicError } from '@anthropic-ai/sdk/error';
|
||||
import { APIError } from '@anthropic-ai/sdk';
|
||||
import { de_ResponseStream } from "../AWS_restJson1.mjs";
|
||||
import { ReadableStreamToAsyncIterable } from "../internal/shims.mjs";
|
||||
import { safeJSON } from "../internal/utils/values.mjs";
|
||||
import { loggerFor } from "../internal/utils/log.mjs";
|
||||
// Decode UTF-8 bytes into a string.
export const toUtf8 = (bytes) => new TextDecoder('utf-8').decode(bytes);
// Encode a string into UTF-8 bytes.
export const fromUtf8 = (text) => new TextEncoder().encode(text);
|
||||
// `de_ResponseStream` parses a Bedrock response stream and emits events as they are found.
// It requires a "context" argument which has many fields, but for what we're using it for
// it only needs this.
export const getMinimalSerdeContext = () => ({
    base64Decoder: fromBase64,
    base64Encoder: toBase64,
    utf8Decoder: fromUtf8,
    utf8Encoder: toUtf8,
    eventStreamMarshaller: new EventStreamMarshaller({ utf8Encoder: toUtf8, utf8Decoder: fromUtf8 }),
    streamCollector: streamCollector,
});
|
||||
export class Stream extends CoreStream {
    /**
     * Builds a Stream of parsed JSON events from a Bedrock event-stream
     * response body, translating Bedrock exception events into thrown errors.
     */
    static fromSSEResponse(response, controller, client) {
        let consumed = false;
        const logger = client ? loggerFor(client) : console;
        // Translates raw Bedrock event-stream events into SSE-shaped
        // { event, data, raw } messages consumed by `iterator` below.
        async function* iterMessages() {
            if (!response.body) {
                controller.abort();
                throw new AnthropicError(`Attempted to iterate over a response with no body`);
            }
            const responseBodyIter = ReadableStreamToAsyncIterable(response.body);
            const eventStream = de_ResponseStream(responseBodyIter, getMinimalSerdeContext());
            for await (const event of eventStream) {
                if (event.chunk && event.chunk.bytes) {
                    const s = toUtf8(event.chunk.bytes);
                    yield { event: 'chunk', data: s, raw: [] };
                }
                else if (event.internalServerException) {
                    yield { event: 'error', data: 'InternalServerException', raw: [] };
                }
                else if (event.modelStreamErrorException) {
                    yield { event: 'error', data: 'ModelStreamErrorException', raw: [] };
                }
                else if (event.validationException) {
                    yield { event: 'error', data: 'ValidationException', raw: [] };
                }
                else if (event.throttlingException) {
                    yield { event: 'error', data: 'ThrottlingException', raw: [] };
                }
            }
        }
        // Note: this function is copied entirely from the core SDK
        async function* iterator() {
            if (consumed) {
                throw new Error('Cannot iterate over a consumed stream, use `.tee()` to split the stream.');
            }
            consumed = true;
            let done = false;
            try {
                for await (const sse of iterMessages()) {
                    if (sse.event === 'chunk') {
                        try {
                            yield JSON.parse(sse.data);
                        }
                        catch (e) {
                            logger.error(`Could not parse message into JSON:`, sse.data);
                            logger.error(`From chunk:`, sse.raw);
                            throw e;
                        }
                    }
                    // Error events become thrown APIErrors, ending iteration.
                    if (sse.event === 'error') {
                        const errText = sse.data;
                        const errJSON = safeJSON(errText);
                        const errMessage = errJSON ? undefined : errText;
                        throw APIError.generate(undefined, errJSON, errMessage, response.headers);
                    }
                }
                done = true;
            }
            catch (e) {
                // If the user calls `stream.controller.abort()`, we should exit without throwing.
                if (isAbortError(e))
                    return;
                throw e;
            }
            finally {
                // If the user `break`s, abort the ongoing request.
                if (!done)
                    controller.abort();
            }
        }
        return new Stream(iterator, controller);
    }
}
|
||||
// True when `err` signals a user-initiated fetch cancellation.
function isAbortError(err) {
    if (typeof err !== 'object' || err === null) {
        return false;
    }
    // Spec-compliant fetch implementations
    if ('name' in err && err.name === 'AbortError') {
        return true;
    }
    // Expo fetch
    return 'message' in err && String(err.message).includes('FetchRequestCanceledException');
}
|
||||
//# sourceMappingURL=streaming.mjs.map
|
||||
3
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/index.mjs
generated
vendored
Normal file
3
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/index.mjs
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
export * from "./client.mjs";
|
||||
export { AnthropicBedrock as default } from "./client.mjs";
|
||||
//# sourceMappingURL=index.mjs.map
|
||||
74
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/headers.mjs
generated
vendored
Normal file
74
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/headers.mjs
generated
vendored
Normal file
@@ -0,0 +1,74 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { isReadonlyArray } from "./utils/values.mjs";
|
||||
// Brand symbol marking objects already produced by `buildHeaders`.
const brand_privateNullableHeaders = Symbol.for('brand.privateNullableHeaders');
// Yields [name, value] pairs from any supported headers shape; a `null` value
// means "clear this header before appending subsequent values".
function* iterateHeaders(headers) {
    if (!headers)
        return;
    if (brand_privateNullableHeaders in headers) {
        // Already-normalized headers: replay the values, then the explicit nulls.
        const { values, nulls } = headers;
        yield* values.entries();
        for (const name of nulls) {
            yield [name, null];
        }
        return;
    }
    // `shouldClear` is only set for plain objects, whose keys overwrite rather
    // than append (Headers instances and entry arrays keep append semantics).
    let shouldClear = false;
    let iter;
    if (headers instanceof Headers) {
        iter = headers.entries();
    }
    else if (isReadonlyArray(headers)) {
        iter = headers;
    }
    else {
        shouldClear = true;
        iter = Object.entries(headers ?? {});
    }
    for (let row of iter) {
        const name = row[0];
        if (typeof name !== 'string')
            throw new TypeError('expected header name to be a string');
        // A value may be a single string or an array of strings.
        const values = isReadonlyArray(row[1]) ? row[1] : [row[1]];
        let didClear = false;
        for (const value of values) {
            if (value === undefined)
                continue;
            // Objects keys always overwrite older headers, they never append.
            // Yield a null to clear the header before adding the new values.
            if (shouldClear && !didClear) {
                didClear = true;
                yield [name, null];
            }
            yield [name, value];
        }
    }
}
|
||||
// Merge several header sources (later sources win) into a branded structure of
// concrete values plus the set of explicitly-nulled header names.
export const buildHeaders = (newHeaders) => {
    const values = new Headers();
    const nulls = new Set();
    for (const source of newHeaders) {
        const seen = new Set();
        for (const [name, value] of iterateHeaders(source)) {
            const lower = name.toLowerCase();
            // The first time a name appears within one source, drop any value
            // accumulated from earlier sources.
            if (!seen.has(lower)) {
                values.delete(name);
                seen.add(lower);
            }
            if (value === null) {
                values.delete(name);
                nulls.add(lower);
            }
            else {
                values.append(name, value);
                nulls.delete(lower);
            }
        }
    }
    return { [brand_privateNullableHeaders]: true, values, nulls };
};
|
||||
// True when `headers` yields no entries at all.
export const isEmptyHeaders = (headers) => {
    for (const _entry of iterateHeaders(headers)) {
        return false;
    }
    return true;
};
|
||||
//# sourceMappingURL=headers.mjs.map
|
||||
85
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/shims.mjs
generated
vendored
Normal file
85
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/shims.mjs
generated
vendored
Normal file
@@ -0,0 +1,85 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
// Returns the global `fetch`, failing with actionable guidance when absent.
export function getDefaultFetch() {
    if (typeof fetch === 'undefined') {
        throw new Error('`fetch` is not defined as a global; Either pass `fetch` to the client, `new Anthropic({ fetch })` or polyfill the global, `globalThis.fetch = fetch`');
    }
    return fetch;
}
|
||||
// Construct a ReadableStream via the global constructor, with a clear error on
// runtimes that lack it.
export function makeReadableStream(...args) {
    const RS = globalThis.ReadableStream;
    if (typeof RS === 'undefined') {
        // Note: All of the platforms / runtimes we officially support already define
        // `ReadableStream` as a global, so this should only ever be hit on unsupported runtimes.
        throw new Error('`ReadableStream` is not defined as a global; You will need to polyfill it, `globalThis.ReadableStream = ReadableStream`');
    }
    return new RS(...args);
}
|
||||
// Wrap any sync or async iterable in a pull-based ReadableStream.
export function ReadableStreamFrom(iterable) {
    const iter = Symbol.asyncIterator in iterable ? iterable[Symbol.asyncIterator]() : iterable[Symbol.iterator]();
    return makeReadableStream({
        start() { },
        async pull(controller) {
            const step = await iter.next();
            if (step.done) {
                controller.close();
                return;
            }
            controller.enqueue(step.value);
        },
        async cancel() {
            // Propagate cancellation to the source iterator if it supports it.
            await iter.return?.();
        },
    });
}
|
||||
/**
 * Most browsers don't yet have async iterable support for ReadableStream,
 * and Node has a very different way of reading bytes from its "ReadableStream".
 *
 * This polyfill was pulled from https://github.com/MattiasBuelens/web-streams-polyfill/pull/122#issuecomment-1627354490
 */
export function ReadableStreamToAsyncIterable(stream) {
    // Already async-iterable: hand it back untouched.
    if (stream[Symbol.asyncIterator])
        return stream;
    const reader = stream.getReader();
    const next = async () => {
        try {
            const result = await reader.read();
            if (result?.done)
                reader.releaseLock(); // release lock when stream becomes closed
            return result;
        }
        catch (e) {
            reader.releaseLock(); // release lock when stream becomes errored
            throw e;
        }
    };
    const finish = async () => {
        const cancelPromise = reader.cancel();
        reader.releaseLock();
        await cancelPromise;
        return { done: true, value: undefined };
    };
    return {
        next,
        return: finish,
        [Symbol.asyncIterator]() {
            return this;
        },
    };
}
|
||||
/**
 * Cancels a ReadableStream we don't need to consume.
 * See https://undici.nodejs.org/#/?id=garbage-collection
 */
export async function CancelReadableStream(stream) {
    if (stream === null || typeof stream !== 'object')
        return;
    // Prefer the async-iterator protocol's `return` for cleanup when available.
    if (stream[Symbol.asyncIterator]) {
        await stream[Symbol.asyncIterator]().return?.();
        return;
    }
    const reader = stream.getReader();
    const pendingCancel = reader.cancel();
    reader.releaseLock();
    await pendingCancel;
}
|
||||
//# sourceMappingURL=shims.mjs.map
|
||||
18
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/utils/env.mjs
generated
vendored
Normal file
18
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/utils/env.mjs
generated
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
/**
 * Read an environment variable.
 *
 * Trims beginning and trailing whitespace.
 *
 * Will return undefined if the environment variable doesn't exist or cannot be accessed.
 */
export const readEnv = (env) => {
    const proc = globalThis.process;
    if (typeof proc !== 'undefined') {
        return proc.env?.[env]?.trim() ?? undefined;
    }
    const deno = globalThis.Deno;
    if (typeof deno !== 'undefined') {
        return deno.env?.get?.(env)?.trim();
    }
    return undefined;
};
|
||||
//# sourceMappingURL=env.mjs.map
|
||||
80
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/utils/log.mjs
generated
vendored
Normal file
80
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/utils/log.mjs
generated
vendored
Normal file
@@ -0,0 +1,80 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { hasOwn } from "./values.mjs";
|
||||
// Numeric rank per log level; larger = more verbose. A log function is enabled
// when its rank is <= the configured level's rank (see makeLogFn).
const levelNumbers = {
    off: 0,
    error: 200,
    warn: 300,
    info: 400,
    debug: 500,
};
|
||||
// Validate a user-supplied log level; warns via the client's logger and
// returns undefined when the value is unrecognized or falsy.
export const parseLogLevel = (maybeLevel, sourceName, client) => {
    if (!maybeLevel) {
        return undefined;
    }
    if (hasOwn(levelNumbers, maybeLevel)) {
        return maybeLevel;
    }
    const expected = JSON.stringify(Object.keys(levelNumbers));
    loggerFor(client).warn(`${sourceName} was set to ${JSON.stringify(maybeLevel)}, expected one of ${expected}`);
    return undefined;
};
|
||||
function noop() { }
// Returns the logger's own method when `fnLevel` is enabled at `logLevel`,
// otherwise a no-op.
function makeLogFn(fnLevel, logger, logLevel) {
    if (!logger || levelNumbers[fnLevel] > levelNumbers[logLevel]) {
        return noop;
    }
    // Don't wrap logger functions, we want the stacktrace intact!
    return logger[fnLevel].bind(logger);
}
// Logger that silently discards everything (used when no logger is configured).
const noopLogger = {
    error: noop,
    warn: noop,
    info: noop,
    debug: noop,
};
// Per-logger cache of [logLevel, leveled logger] pairs; WeakMap so loggers can
// be garbage-collected.
let cachedLoggers = /* @__PURE__ */ new WeakMap();
|
||||
// Returns a leveled logger for `client`, caching the result per underlying
// logger + level so repeated calls are cheap.
export function loggerFor(client) {
    const logger = client.logger;
    if (!logger) {
        return noopLogger;
    }
    const logLevel = client.logLevel ?? 'off';
    const cached = cachedLoggers.get(logger);
    if (cached && cached[0] === logLevel) {
        return cached[1];
    }
    const levelLogger = {
        error: makeLogFn('error', logger, logLevel),
        warn: makeLogFn('warn', logger, logLevel),
        info: makeLogFn('info', logger, logLevel),
        debug: makeLogFn('debug', logger, logLevel),
    };
    cachedLoggers.set(logger, [logLevel, levelLogger]);
    return levelLogger;
}
|
||||
/**
 * Normalizes request details for logging: drops the redundant copy of
 * headers nested inside `options`, masks credential-bearing header values,
 * and renames `retryOfRequestLogID` to the shorter `retryOf`.
 *
 * Note: mutates (and returns) the `details` object it is given; only the
 * nested `options` object is shallow-copied before modification.
 */
export const formatRequestDetails = (details) => {
    if (details.options) {
        // Copy before deleting so the caller's own options object is untouched.
        details.options = { ...details.options };
        delete details.options['headers']; // redundant + leaks internals
    }
    if (details.headers) {
        const pairs =
            details.headers instanceof Headers ? [...details.headers] : Object.entries(details.headers);
        details.headers = Object.fromEntries(pairs.map(([name, value]) => {
            const lower = name.toLowerCase();
            const sensitive =
                lower === 'x-api-key' || lower === 'authorization' || lower === 'cookie' || lower === 'set-cookie';
            // Keep the original header name; only the value is redacted.
            return [name, sensitive ? '***' : value];
        }));
    }
    if ('retryOfRequestLogID' in details) {
        if (details.retryOfRequestLogID) {
            details.retryOf = details.retryOfRequestLogID;
        }
        delete details.retryOfRequestLogID;
    }
    return details;
};
|
||||
//# sourceMappingURL=log.mjs.map
|
||||
74
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/utils/path.mjs
generated
vendored
Normal file
74
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/utils/path.mjs
generated
vendored
Normal file
@@ -0,0 +1,74 @@
|
||||
import { AnthropicError } from "../../core/error.mjs";
|
||||
// Characters that may appear unencoded in a URI path segment, per
// https://datatracker.ietf.org/doc/html/rfc3986#section-3.3:
//   unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
//   sub-delims = "!" / "$" / "&" / "'" / "(" / ")" / "*" / "+" / "," / ";" / "="
//   pchar      = unreserved / pct-encoded / sub-delims / ":" / "@"
const UNSAFE_PATH_RUN = /[^A-Za-z0-9\-._~!$&'()*+,;=:@]+/g;

/**
 * Percent-encodes every run of characters that is not safe in a URI path,
 * leaving RFC 3986 pchar characters as-is. Note "/" is not in the safe set,
 * so slashes inside a single value are encoded too.
 */
export function encodeURIPath(str) {
    return str.replace(UNSAFE_PATH_RUN, encodeURIComponent);
}
|
||||
// Frozen null-prototype object used as a safe ?? fallback while walking a
// value's prototype chain below.
const EMPTY = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.create(null));
/**
 * Builds a tagged-template function that joins template statics with
 * `pathEncoder`-encoded interpolated values, then validates the result.
 *
 * Two classes of problems are collected and reported together in a single
 * AnthropicError with a caret (^) underline pointing at each bad span:
 *   1. interpolated values that are null/undefined or plain objects whose
 *      toString is the default Object.prototype.toString (their string form
 *      would be meaningless in a path);
 *   2. "." or ".." path segments (including %2e-encoded forms) in the final
 *      path, which could escape the intended URL hierarchy.
 *
 * Values interpolated after the first "?" or "#" belong to the query or
 * fragment and are encoded with encodeURIComponent instead of `pathEncoder`.
 */
export const createPathTagFunction = (pathEncoder = encodeURIPath) => function path(statics, ...params) {
    // If there are no params, no processing is needed.
    if (statics.length === 1)
        return statics[0];
    // Becomes true once a "?" or "#" has been seen in the statics.
    let postPath = false;
    // Collected {start, length, error} records for the report below.
    const invalidSegments = [];
    // NOTE: this inner `path` (the joined string) shadows the function name.
    const path = statics.reduce((previousValue, currentValue, index) => {
        if (/[?#]/.test(currentValue)) {
            postPath = true;
        }
        // params has one fewer element than statics; value is undefined on
        // the final iteration and the index === params.length checks skip it.
        const value = params[index];
        let encoded = (postPath ? encodeURIComponent : pathEncoder)('' + value);
        if (index !== params.length &&
            (value == null ||
                (typeof value === 'object' &&
                    // handle values from other realms
                    value.toString ===
                        Object.getPrototypeOf(Object.getPrototypeOf(value.hasOwnProperty ?? EMPTY) ?? EMPTY)
                            ?.toString))) {
            // Record the raw (unencoded) rendering so the underline lines up.
            encoded = value + '';
            invalidSegments.push({
                start: previousValue.length + currentValue.length,
                length: encoded.length,
                error: `Value of type ${Object.prototype.toString
                    .call(value)
                    .slice(8, -1)} is not a valid path parameter`,
            });
        }
        return previousValue + currentValue + (index === params.length ? '' : encoded);
    }, '');
    // Only the path portion (before any "?" or "#") is checked for dot segments.
    const pathOnly = path.split(/[?#]/, 1)[0];
    // Matches ".", "..", "%2e", "%2e%2e" etc. as a whole segment, case-insensitive.
    const invalidSegmentPattern = /(?<=^|\/)(?:\.|%2e){1,2}(?=\/|$)/gi;
    let match;
    // Find all invalid segments
    // (stateful /g exec loop: each call advances lastIndex until null).
    while ((match = invalidSegmentPattern.exec(pathOnly)) !== null) {
        invalidSegments.push({
            start: match.index,
            length: match[0].length,
            error: `Value "${match[0]}" can\'t be safely passed as a path parameter`,
        });
    }
    // Sort by position so the caret underline is built left-to-right.
    invalidSegments.sort((a, b) => a.start - b.start);
    if (invalidSegments.length > 0) {
        let lastEnd = 0;
        // Build a second line of spaces and carets aligned under the bad spans.
        const underline = invalidSegments.reduce((acc, segment) => {
            const spaces = ' '.repeat(segment.start - lastEnd);
            const arrows = '^'.repeat(segment.length);
            lastEnd = segment.start + segment.length;
            return acc + spaces + arrows;
        }, '');
        throw new AnthropicError(`Path parameters result in path with invalid segments:\n${invalidSegments
            .map((e) => e.error)
            .join('\n')}\n${path}\n${underline}`);
    }
    return path;
};
|
||||
/**
 * Tagged-template helper that URI-encodes interpolated path parameters and
 * refuses values that would introduce unsafe "/./" or "/../" segments.
 */
export const path = /* @__PURE__ */ createPathTagFunction(encodeURIPath);
|
||||
//# sourceMappingURL=path.mjs.map
|
||||
94
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/utils/values.mjs
generated
vendored
Normal file
94
extracted-source/node_modules/@anthropic-ai/bedrock-sdk/internal/utils/values.mjs
generated
vendored
Normal file
@@ -0,0 +1,94 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { AnthropicError } from "../../core/error.mjs";
|
||||
// https://url.spec.whatwg.org/#url-scheme-string
|
||||
// Matches a leading URL scheme such as "https:", per
// https://url.spec.whatwg.org/#url-scheme-string
const startsWithSchemeRegexp = /^[a-z][a-z0-9+.-]*:/i;

/** True when `url` begins with an explicit scheme, i.e. is absolute. */
export const isAbsoluteURL = (url) => startsWithSchemeRegexp.test(url);
|
||||
// Lazily resolves to Array.isArray: the first call rebinds `isArray` to the
// native function, so later calls skip the wrapper. Exported as `let` on
// purpose — the self-reassignment requires a mutable binding.
export let isArray = (val) => ((isArray = Array.isArray), isArray(val));
// Captures the initial lazy wrapper (not the rebound value); it still works
// after `isArray` rebinds, since the wrapper delegates on every call.
export let isReadonlyArray = isArray;
|
||||
/** Returns `x` itself when it is a non-null object, otherwise an empty object. */
export function maybeObj(x) {
    // typeof null === 'object', which is why the ?? fallback is needed;
    // functions and primitives (typeof !== 'object') also collapse to {}.
    return typeof x === 'object' ? x ?? {} : {};
}
|
||||
// True when `obj` is falsy or has no enumerable keys (own or inherited).
// https://stackoverflow.com/a/34491287
export function isEmptyObj(obj) {
    if (!obj) {
        return true;
    }
    // for...in is deliberate: it bails on the very first enumerable key —
    // including inherited ones — without materializing a key array.
    let empty = true;
    for (const key in obj) {
        empty = false;
        break;
    }
    return empty;
}
|
||||
// Prototype-safe own-property check; avoids calling a possibly shadowed or
// missing `hasOwnProperty` on the object itself.
// https://eslint.org/docs/latest/rules/no-prototype-builtins
const ownProperty = Object.prototype.hasOwnProperty;
export function hasOwn(obj, key) {
    return ownProperty.call(obj, key);
}
|
||||
/** True for non-null, non-array objects. */
export function isObj(obj) {
    if (obj == null) {
        return false;
    }
    return typeof obj === 'object' && !Array.isArray(obj);
}
|
||||
/**
 * Asserts that `value` is neither null nor undefined, returning it unchanged.
 * @throws AnthropicError when the value is nullish.
 */
export const ensurePresent = (value) => {
    if (value != null) {
        return value;
    }
    throw new AnthropicError(`Expected a value to be given but received ${value} instead.`);
};
|
||||
/**
 * Validates that `n` is a non-negative integer and returns it.
 * (Despite the name and the second error message, 0 is accepted.)
 * @throws AnthropicError when `n` is not an integer, or is negative.
 */
export const validatePositiveInteger = (name, n) => {
    // Number.isInteger is false for every non-number value, so this single
    // check covers both the typeof and the integrality requirement.
    if (!Number.isInteger(n)) {
        throw new AnthropicError(`${name} must be an integer`);
    }
    if (n < 0) {
        throw new AnthropicError(`${name} must be a positive integer`);
    }
    return n;
};
|
||||
/**
 * Coerces a number (rounded) or a numeric string to an integer.
 * @throws AnthropicError for any other type.
 */
export const coerceInteger = (value) => {
    switch (typeof value) {
        case 'number':
            return Math.round(value);
        case 'string':
            return parseInt(value, 10);
        default:
            throw new AnthropicError(`Could not coerce ${value} (type: ${typeof value}) into a number`);
    }
};
/**
 * Coerces a number (unchanged) or a numeric string to a float.
 * @throws AnthropicError for any other type.
 */
export const coerceFloat = (value) => {
    switch (typeof value) {
        case 'number':
            return value;
        case 'string':
            return parseFloat(value);
        default:
            throw new AnthropicError(`Could not coerce ${value} (type: ${typeof value}) into a number`);
    }
};
/**
 * Coerces to boolean: booleans pass through, strings compare against the
 * exact text 'true', and everything else goes through Boolean().
 */
export const coerceBoolean = (value) => {
    switch (typeof value) {
        case 'boolean':
            return value;
        case 'string':
            return value === 'true';
        default:
            return Boolean(value);
    }
};
/** Like coerceInteger, but passes nullish values through as undefined. */
export const maybeCoerceInteger = (value) => (value == null ? undefined : coerceInteger(value));
/** Like coerceFloat, but passes nullish values through as undefined. */
export const maybeCoerceFloat = (value) => (value == null ? undefined : coerceFloat(value));
/** Like coerceBoolean, but passes nullish values through as undefined. */
export const maybeCoerceBoolean = (value) => (value == null ? undefined : coerceBoolean(value));
|
||||
/** Parses JSON text, returning undefined instead of throwing on bad input. */
export const safeJSON = (text) => {
    try {
        return JSON.parse(text);
    }
    catch {
        return undefined;
    }
};
|
||||
//# sourceMappingURL=values.mjs.map
|
||||
Reference in New Issue
Block a user