Add extracted source directory and README navigation

This commit is contained in:
Shawn Bot
2026-03-31 14:56:06 +00:00
parent 6252bb6eb5
commit 91e01d755b
4757 changed files with 984951 additions and 0 deletions

View File

@@ -0,0 +1,82 @@
import { Sha256 } from '@aws-crypto/sha256-js';
import { FetchHttpHandler } from '@smithy/fetch-http-handler';
import { HttpRequest } from '@smithy/protocol-http';
import { SignatureV4 } from '@smithy/signature-v4';
import assert from 'assert';
/**
 * Default credential resolver: lazily imports '@aws-sdk/credential-providers'
 * and builds a Node provider chain whose internal HTTP calls go through a
 * fetch-based handler (so it works in fetch-capable runtimes).
 *
 * The dynamic import keeps '@aws-sdk/credential-providers' an optional
 * dependency; if it cannot be loaded, the rejection is rewrapped with a hint
 * to supply a custom `providerChainResolver` instead.
 *
 * @returns a promise for a credential provider function.
 * @throws {Error} when '@aws-sdk/credential-providers' cannot be imported.
 */
const DEFAULT_PROVIDER_CHAIN_RESOLVER = () => import('@aws-sdk/credential-providers')
    .then(({ fromNodeProviderChain }) => fromNodeProviderChain({
    clientConfig: {
        requestHandler: new FetchHttpHandler({
            // Pass the outgoing request's fields through as fetch RequestInit
            // overrides (shallow copy; no fields are modified here).
            requestInit: (httpRequest) => {
                return {
                    ...httpRequest,
                };
            },
        }),
    },
}))
    .catch((error) => {
    // Fix: a space is required after the first sentence so the concatenated
    // message does not read "...credential-providers'.You can provide...".
    throw new Error(`Failed to import '@aws-sdk/credential-providers'. ` +
        `You can provide a custom \`providerChainResolver\` in the client options if your runtime does not have access to '@aws-sdk/credential-providers': ` +
        `\`new AnthropicBedrock({ providerChainResolver })\` ` +
        `Original error: ${error.message}`);
});
/**
 * Signs an outgoing Bedrock request with AWS Signature V4 and returns the
 * resulting signed headers for the caller to attach to the request.
 *
 * @param req - outgoing request; `method` must be set; `headers` may be an
 *   iterable Headers-like object or a plain object; `body` is passed through
 *   to the signer unchanged.
 * @param props - must include `url` and `regionName`; may include explicit
 *   credentials (`awsAccessKey`, `awsSecretKey`, `awsSessionToken`) and a
 *   custom `providerChainResolver` that replaces the default chain.
 * @returns the headers object produced by the SigV4 signer.
 */
export const getAuthHeaders = async (req, props) => {
    assert(req.method, 'Expected request method property to be set');
    // Caller-supplied resolver wins; otherwise fall back to the lazy default
    // that imports '@aws-sdk/credential-providers'.
    const providerChain = await (props.providerChainResolver ?
        props.providerChainResolver()
        : DEFAULT_PROVIDER_CHAIN_RESOLVER());
    const credentials = await withTempEnv(() => {
        // Temporarily set the appropriate environment variables if we've been
        // explicitly given credentials so that the credentials provider can
        // resolve them.
        //
        // Note: the environment provider is only not run first if the `AWS_PROFILE`
        // environment variable is set.
        // https://github.com/aws/aws-sdk-js-v3/blob/44a18a34b2c93feccdfcd162928d13e6dbdcaf30/packages/credential-provider-node/src/defaultProvider.ts#L49
        if (props.awsAccessKey) {
            process.env['AWS_ACCESS_KEY_ID'] = props.awsAccessKey;
        }
        if (props.awsSecretKey) {
            process.env['AWS_SECRET_ACCESS_KEY'] = props.awsSecretKey;
        }
        if (props.awsSessionToken) {
            process.env['AWS_SESSION_TOKEN'] = props.awsSessionToken;
        }
    }, () => providerChain());
    const signer = new SignatureV4({
        service: 'bedrock',
        region: props.regionName,
        credentials,
        sha256: Sha256,
    });
    const url = new URL(props.url);
    // Normalize headers to a plain object: iterate Headers-like objects
    // (anything exposing Symbol.iterator) into entries, shallow-copy plain
    // objects, and use an empty object when no headers were given.
    const headers = !req.headers ? {}
        : Symbol.iterator in req.headers ?
            Object.fromEntries(Array.from(req.headers).map((header) => [...header]))
            : { ...req.headers };
    // The connection header may be stripped by a proxy somewhere, so the receiver
    // of this message may not see this header, so we remove it from the set of headers
    // that are signed.
    delete headers['connection'];
    // SigV4 requires the host header to match the URL being signed.
    headers['host'] = url.hostname;
    const request = new HttpRequest({
        method: req.method.toUpperCase(),
        protocol: url.protocol,
        path: url.pathname,
        headers,
        body: req.body,
    });
    const signed = await signer.sign(request);
    return signed.headers;
};
/**
 * Runs `fn` after first applying `updateEnv` to mutate `process.env`, and
 * restores the complete pre-call environment snapshot afterwards — whether
 * `fn` resolves or rejects.
 *
 * @param updateEnv - synchronous callback that mutates `process.env`.
 * @param fn - the (possibly async) work to run under the modified env.
 * @returns whatever `fn` resolves to.
 */
const withTempEnv = async (updateEnv, fn) => {
    // Shallow snapshot of every current env var so the whole object can be
    // swapped back in one assignment.
    const snapshot = { ...process.env };
    try {
        updateEnv();
        return await fn();
    }
    finally {
        process.env = snapshot;
    }
};
//# sourceMappingURL=auth.mjs.map

View File

@@ -0,0 +1,2 @@
export * from '@anthropic-ai/sdk/core/error';
//# sourceMappingURL=error.mjs.map

View File

@@ -0,0 +1,108 @@
import { EventStreamMarshaller } from '@smithy/eventstream-serde-node';
import { fromBase64, toBase64 } from '@smithy/util-base64';
import { streamCollector } from '@smithy/fetch-http-handler';
import { Stream as CoreStream } from '@anthropic-ai/sdk/streaming';
import { AnthropicError } from '@anthropic-ai/sdk/error';
import { APIError } from '@anthropic-ai/sdk';
import { de_ResponseStream } from "../AWS_restJson1.mjs";
import { ReadableStreamToAsyncIterable } from "../internal/shims.mjs";
import { safeJSON } from "../internal/utils/values.mjs";
import { loggerFor } from "../internal/utils/log.mjs";
// Shared codec instances: TextDecoder/TextEncoder are stateless for whole-buffer
// calls, so constructing them once avoids a fresh allocation on every chunk of a
// streaming response.
const UTF8_DECODER = new TextDecoder('utf-8');
const UTF8_ENCODER = new TextEncoder();
/** Decodes a UTF-8 byte buffer (Uint8Array/ArrayBuffer) into a string. */
export const toUtf8 = (input) => UTF8_DECODER.decode(input);
/** Encodes a string into a UTF-8 Uint8Array. */
export const fromUtf8 = (input) => UTF8_ENCODER.encode(input);
// `de_ResponseStream` parses a Bedrock response stream and emits events as they are found.
// It requires a "context" argument which has many fields, but for what we're using it for
// it only needs this.
// `de_ResponseStream` parses a Bedrock response stream and emits events as they
// are found. It expects a serde "context" with many fields, but only the subset
// built here is actually needed for our use.
//
// Note the Smithy naming convention: a utf8 *encoder* maps bytes -> string and
// a utf8 *decoder* maps string -> bytes.
export const getMinimalSerdeContext = () => {
    const eventStreamMarshaller = new EventStreamMarshaller({
        utf8Encoder: toUtf8,
        utf8Decoder: fromUtf8,
    });
    const context = {
        base64Decoder: fromBase64,
        base64Encoder: toBase64,
        utf8Decoder: fromUtf8,
        utf8Encoder: toUtf8,
        eventStreamMarshaller,
        streamCollector,
    };
    return context;
};
// Bedrock-specific stream adapter. Bedrock responses arrive as an AWS event
// stream (not standard SSE), so this subclass overrides `fromSSEResponse` to
// decode that event stream via `de_ResponseStream` and feed the decoded
// payloads through the same iteration contract as the core SDK's Stream.
export class Stream extends CoreStream {
    /**
     * Builds a Stream from a fetch-style `response` whose body is an AWS
     * event stream. `controller` is the AbortController tied to the request;
     * `client` (optional) supplies the logger — falls back to `console`.
     */
    static fromSSEResponse(response, controller, client) {
        // Guards against iterating the same stream twice; `.tee()` must be
        // used to split it instead.
        let consumed = false;
        const logger = client ? loggerFor(client) : console;
        // Decodes the raw event stream into SSE-shaped messages:
        // - `chunk` events yield their UTF-8 payload as `data`
        // - known Bedrock exception events yield an `error` message naming
        //   the exception type (the event carries no further detail here)
        // `raw` is always `[]`: the original bytes are not preserved; it only
        // feeds the error log below.
        async function* iterMessages() {
            if (!response.body) {
                controller.abort();
                throw new AnthropicError(`Attempted to iterate over a response with no body`);
            }
            const responseBodyIter = ReadableStreamToAsyncIterable(response.body);
            const eventStream = de_ResponseStream(responseBodyIter, getMinimalSerdeContext());
            for await (const event of eventStream) {
                if (event.chunk && event.chunk.bytes) {
                    const s = toUtf8(event.chunk.bytes);
                    yield { event: 'chunk', data: s, raw: [] };
                }
                else if (event.internalServerException) {
                    yield { event: 'error', data: 'InternalServerException', raw: [] };
                }
                else if (event.modelStreamErrorException) {
                    yield { event: 'error', data: 'ModelStreamErrorException', raw: [] };
                }
                else if (event.validationException) {
                    yield { event: 'error', data: 'ValidationException', raw: [] };
                }
                else if (event.throttlingException) {
                    yield { event: 'error', data: 'ThrottlingException', raw: [] };
                }
            }
        }
        // Note: this function is copied entirely from the core SDK.
        // It parses each `chunk` message as JSON and yields the parsed value;
        // `error` messages are converted into thrown APIErrors.
        async function* iterator() {
            if (consumed) {
                throw new Error('Cannot iterate over a consumed stream, use `.tee()` to split the stream.');
            }
            consumed = true;
            let done = false;
            try {
                for await (const sse of iterMessages()) {
                    if (sse.event === 'chunk') {
                        try {
                            yield JSON.parse(sse.data);
                        }
                        catch (e) {
                            logger.error(`Could not parse message into JSON:`, sse.data);
                            logger.error(`From chunk:`, sse.raw);
                            throw e;
                        }
                    }
                    if (sse.event === 'error') {
                        // If the error payload is valid JSON, pass it to
                        // APIError.generate structurally; otherwise pass the
                        // raw text as the message.
                        const errText = sse.data;
                        const errJSON = safeJSON(errText);
                        const errMessage = errJSON ? undefined : errText;
                        throw APIError.generate(undefined, errJSON, errMessage, response.headers);
                    }
                }
                done = true;
            }
            catch (e) {
                // If the user calls `stream.controller.abort()`, we should exit without throwing.
                if (isAbortError(e))
                    return;
                throw e;
            }
            finally {
                // If the user `break`s, abort the ongoing request.
                if (!done)
                    controller.abort();
            }
        }
        return new Stream(iterator, controller);
    }
}
/**
 * Heuristically detects a request-cancellation error so callers can treat a
 * user-initiated abort as a clean exit rather than a failure.
 *
 * @param err - any thrown value.
 * @returns true for spec-compliant fetch aborts (`name === 'AbortError'`) and
 *   for Expo's fetch cancellation message; false for everything else.
 */
function isAbortError(err) {
    if (typeof err !== 'object' || err === null) {
        return false;
    }
    // Spec-compliant fetch implementations
    if ('name' in err && err.name === 'AbortError') {
        return true;
    }
    // Expo fetch
    return 'message' in err && String(err.message).includes('FetchRequestCanceledException');
}
//# sourceMappingURL=streaming.mjs.map