mirror of
https://github.com/tvytlx/ai-agent-deep-dive.git
synced 2026-04-07 09:34:49 +08:00
Add extracted source directory and README navigation
This commit is contained in:
36
extracted-source/node_modules/@smithy/util-stream/dist-cjs/ByteArrayCollector.js
generated
vendored
Normal file
36
extracted-source/node_modules/@smithy/util-stream/dist-cjs/ByteArrayCollector.js
generated
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.ByteArrayCollector = void 0;
|
||||
/**
 * Accumulates byte-array chunks and concatenates them on demand.
 * The allocator callback supplied at construction decides the concrete
 * array type (e.g. Uint8Array or Buffer) produced by flush().
 */
class ByteArrayCollector {
    allocByteArray;
    byteLength = 0;
    byteArrays = [];
    /**
     * @param allocByteArray - factory that allocates an empty byte array of the given size.
     */
    constructor(allocByteArray) {
        this.allocByteArray = allocByteArray;
    }
    /**
     * Appends a chunk and tracks the running total byte length.
     */
    push(byteArray) {
        this.byteLength += byteArray.byteLength;
        this.byteArrays.push(byteArray);
    }
    /**
     * Concatenates all buffered chunks into a single array and resets the
     * collector. Fast path: a single buffered chunk is returned as-is
     * without copying.
     */
    flush() {
        if (this.byteArrays.length === 1) {
            const only = this.byteArrays[0];
            this.reset();
            return only;
        }
        const combined = this.allocByteArray(this.byteLength);
        let offset = 0;
        for (const part of this.byteArrays) {
            combined.set(part, offset);
            offset += part.byteLength;
        }
        this.reset();
        return combined;
    }
    /**
     * Clears the buffered chunks and the running byte count.
     */
    reset() {
        this.byteArrays = [];
        this.byteLength = 0;
    }
}
|
||||
exports.ByteArrayCollector = ByteArrayCollector;
|
||||
7
extracted-source/node_modules/@smithy/util-stream/dist-cjs/checksum/ChecksumStream.browser.js
generated
vendored
Normal file
7
extracted-source/node_modules/@smithy/util-stream/dist-cjs/checksum/ChecksumStream.browser.js
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.ChecksumStream = void 0;
|
||||
// Resolve the base class at module load: prefer the global ReadableStream,
// falling back to a do-nothing constructor on platforms without the Web
// Streams API so this module can still be loaded.
const ReadableStreamRef = (() => {
    if (typeof ReadableStream === "function") {
        return ReadableStream;
    }
    return function () { };
})();
/**
 * Marker subclass used so checksum-wrapped web streams are recognizable via
 * `instanceof` while remaining ordinary ReadableStreams.
 */
class ChecksumStream extends ReadableStreamRef {
}
|
||||
exports.ChecksumStream = ChecksumStream;
|
||||
53
extracted-source/node_modules/@smithy/util-stream/dist-cjs/checksum/ChecksumStream.js
generated
vendored
Normal file
53
extracted-source/node_modules/@smithy/util-stream/dist-cjs/checksum/ChecksumStream.js
generated
vendored
Normal file
@@ -0,0 +1,53 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.ChecksumStream = void 0;
|
||||
const util_base64_1 = require("@smithy/util-base64");
const stream_1 = require("stream");
/**
 * Node.js Duplex stream that passes bytes through unchanged while feeding
 * them to a checksum calculator. When the piped-in source ends, the computed
 * digest is compared against the expected checksum; a mismatch surfaces as a
 * stream error via the _final callback.
 */
class ChecksumStream extends stream_1.Duplex {
    // Expected checksum value (encoded form) the digest must match.
    expectedChecksum;
    // Response header name the expected checksum came from (used in error text).
    checksumSourceLocation;
    // Checksum calculator exposing update() and async digest().
    checksum;
    // Upstream readable that is piped into this duplex.
    source;
    // Encoder applied to the raw digest bytes; defaults to base64.
    base64Encoder;
    constructor({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }) {
        super();
        // Only pipe-capable (Node stream) sources are supported here; web
        // ReadableStream sources are handled by the browser variant instead.
        if (typeof source.pipe === "function") {
            this.source = source;
        }
        else {
            throw new Error(`@smithy/util-stream: unsupported source type ${source?.constructor?.name ?? source} in ChecksumStream.`);
        }
        this.base64Encoder = base64Encoder ?? util_base64_1.toBase64;
        this.expectedChecksum = expectedChecksum;
        this.checksum = checksum;
        this.checksumSourceLocation = checksumSourceLocation;
        // Start driving this duplex from the source immediately.
        this.source.pipe(this);
    }
    // Reading is push-driven from _write; nothing to do on demand.
    _read(size) { }
    _write(chunk, encoding, callback) {
        try {
            // Fold the chunk into the running checksum, then forward it downstream.
            this.checksum.update(chunk);
            this.push(chunk);
        }
        catch (e) {
            return callback(e);
        }
        return callback();
    }
    async _final(callback) {
        try {
            const digest = await this.checksum.digest();
            const received = this.base64Encoder(digest);
            if (this.expectedChecksum !== received) {
                return callback(new Error(`Checksum mismatch: expected "${this.expectedChecksum}" but received "${received}"` +
                    ` in response header "${this.checksumSourceLocation}".`));
            }
        }
        catch (e) {
            return callback(e);
        }
        // Signal EOF downstream only after the checksum validated.
        this.push(null);
        return callback();
    }
}
|
||||
exports.ChecksumStream = ChecksumStream;
|
||||
39
extracted-source/node_modules/@smithy/util-stream/dist-cjs/checksum/createChecksumStream.browser.js
generated
vendored
Normal file
39
extracted-source/node_modules/@smithy/util-stream/dist-cjs/checksum/createChecksumStream.browser.js
generated
vendored
Normal file
@@ -0,0 +1,39 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.createChecksumStream = void 0;
|
||||
const util_base64_1 = require("@smithy/util-base64");
const stream_type_check_1 = require("../stream-type-check");
const ChecksumStream_browser_1 = require("./ChecksumStream.browser");
/**
 * Wraps a web ReadableStream in a TransformStream that updates a checksum
 * for each chunk and, on flush, compares the digest to the expected value.
 * The returned readable errors on mismatch and is re-branded as
 * ChecksumStream so `instanceof` checks succeed.
 */
const createChecksumStream = ({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }) => {
    if (!(0, stream_type_check_1.isReadableStream)(source)) {
        throw new Error(`@smithy/util-stream: unsupported source type ${source?.constructor?.name ?? source} in ChecksumStream.`);
    }
    const encoder = base64Encoder ?? util_base64_1.toBase64;
    if (typeof TransformStream !== "function") {
        throw new Error("@smithy/util-stream: unable to instantiate ChecksumStream because API unavailable: ReadableStream/TransformStream.");
    }
    const transform = new TransformStream({
        start() { },
        async transform(chunk, controller) {
            // Pass chunks through unchanged while accumulating the checksum.
            checksum.update(chunk);
            controller.enqueue(chunk);
        },
        async flush(controller) {
            const digest = await checksum.digest();
            const received = encoder(digest);
            if (expectedChecksum !== received) {
                const error = new Error(`Checksum mismatch: expected "${expectedChecksum}" but received "${received}"` +
                    ` in response header "${checksumSourceLocation}".`);
                controller.error(error);
            }
            else {
                controller.terminate();
            }
        },
    });
    source.pipeThrough(transform);
    const readable = transform.readable;
    // Re-brand the plain readable so it reads as a ChecksumStream to callers.
    Object.setPrototypeOf(readable, ChecksumStream_browser_1.ChecksumStream.prototype);
    return readable;
};
exports.createChecksumStream = createChecksumStream;
|
||||
12
extracted-source/node_modules/@smithy/util-stream/dist-cjs/checksum/createChecksumStream.js
generated
vendored
Normal file
12
extracted-source/node_modules/@smithy/util-stream/dist-cjs/checksum/createChecksumStream.js
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.createChecksumStream = createChecksumStream;
|
||||
const stream_type_check_1 = require("../stream-type-check");
const ChecksumStream_1 = require("./ChecksumStream");
const createChecksumStream_browser_1 = require("./createChecksumStream.browser");
/**
 * Wraps a stream so its bytes are checksummed as they flow through.
 * Dispatches to the web-stream implementation when the source is a
 * ReadableStream, and to the Node.js Duplex implementation otherwise.
 */
function createChecksumStream(init) {
    const sourceIsWebStream = typeof ReadableStream === "function" && (0, stream_type_check_1.isReadableStream)(init.source);
    return sourceIsWebStream
        ? (0, createChecksumStream_browser_1.createChecksumStream)(init)
        : new ChecksumStream_1.ChecksumStream(init);
}
|
||||
60
extracted-source/node_modules/@smithy/util-stream/dist-cjs/createBufferedReadable.js
generated
vendored
Normal file
60
extracted-source/node_modules/@smithy/util-stream/dist-cjs/createBufferedReadable.js
generated
vendored
Normal file
@@ -0,0 +1,60 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.createBufferedReadable = createBufferedReadable;
|
||||
const node_stream_1 = require("node:stream");
const ByteArrayCollector_1 = require("./ByteArrayCollector");
const createBufferedReadableStream_1 = require("./createBufferedReadableStream");
const stream_type_check_1 = require("./stream-type-check");
/**
 * Wraps a Node Readable (or web ReadableStream, via the web implementation)
 * so that small chunks are coalesced until at least `size` bytes are
 * available before being pushed downstream.
 *
 * @param upstream - source stream to buffer.
 * @param size - minimum chunk size (bytes) to emit downstream.
 * @param logger - optional; warned once when buffering activates.
 */
function createBufferedReadable(upstream, size, logger) {
    if ((0, stream_type_check_1.isReadableStream)(upstream)) {
        return (0, createBufferedReadableStream_1.createBufferedReadableStream)(upstream, size, logger);
    }
    const downstream = new node_stream_1.Readable({ read() { } });
    let streamBufferingLoggedWarning = false;
    let bytesSeen = 0;
    // One accumulator per chunk mode: 0 = string, 1 = Uint8Array, 2 = Buffer.
    const buffers = [
        "",
        new ByteArrayCollector_1.ByteArrayCollector((size) => new Uint8Array(size)),
        new ByteArrayCollector_1.ByteArrayCollector((size) => Buffer.from(new Uint8Array(size))),
    ];
    // Mode of chunks currently being accumulated; -1 = none yet / unknown type.
    let mode = -1;
    upstream.on("data", (chunk) => {
        const chunkMode = (0, createBufferedReadableStream_1.modeOf)(chunk, true);
        if (mode !== chunkMode) {
            // Chunk type changed: flush whatever was buffered under the old mode.
            if (mode >= 0) {
                downstream.push((0, createBufferedReadableStream_1.flush)(buffers, mode));
            }
            mode = chunkMode;
        }
        if (mode === -1) {
            // Unknown chunk type: forward as-is without buffering.
            downstream.push(chunk);
            return;
        }
        const chunkSize = (0, createBufferedReadableStream_1.sizeOf)(chunk);
        bytesSeen += chunkSize;
        const bufferSize = (0, createBufferedReadableStream_1.sizeOf)(buffers[mode]);
        if (chunkSize >= size && bufferSize === 0) {
            // Chunk already meets the threshold and nothing is pending: pass through.
            downstream.push(chunk);
        }
        else {
            const newSize = (0, createBufferedReadableStream_1.merge)(buffers, mode, chunk);
            if (!streamBufferingLoggedWarning && bytesSeen > size * 2) {
                streamBufferingLoggedWarning = true;
                logger?.warn(`@smithy/util-stream - stream chunk size ${chunkSize} is below threshold of ${size}, automatically buffering.`);
            }
            if (newSize >= size) {
                downstream.push((0, createBufferedReadableStream_1.flush)(buffers, mode));
            }
        }
    });
    upstream.on("end", () => {
        // Emit any remaining buffered bytes before closing downstream.
        if (mode !== -1) {
            const remainder = (0, createBufferedReadableStream_1.flush)(buffers, mode);
            if ((0, createBufferedReadableStream_1.sizeOf)(remainder) > 0) {
                downstream.push(remainder);
            }
        }
        downstream.push(null);
    });
    return downstream;
}
|
||||
103
extracted-source/node_modules/@smithy/util-stream/dist-cjs/createBufferedReadableStream.js
generated
vendored
Normal file
103
extracted-source/node_modules/@smithy/util-stream/dist-cjs/createBufferedReadableStream.js
generated
vendored
Normal file
@@ -0,0 +1,103 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.createBufferedReadable = void 0;
|
||||
exports.createBufferedReadableStream = createBufferedReadableStream;
|
||||
exports.merge = merge;
|
||||
exports.flush = flush;
|
||||
exports.sizeOf = sizeOf;
|
||||
exports.modeOf = modeOf;
|
||||
const ByteArrayCollector_1 = require("./ByteArrayCollector");
/**
 * Web-stream analogue of createBufferedReadable: coalesces small chunks from
 * `upstream` until at least `size` bytes are available before enqueueing them
 * on the returned ReadableStream.
 *
 * @param upstream - web ReadableStream to buffer.
 * @param size - minimum chunk size (bytes) to emit.
 * @param logger - optional; warned once when buffering activates.
 */
function createBufferedReadableStream(upstream, size, logger) {
    const reader = upstream.getReader();
    let streamBufferingLoggedWarning = false;
    let bytesSeen = 0;
    // Accumulators by mode: 0 = string, 1 = Uint8Array (no Buffer mode on web).
    const buffers = ["", new ByteArrayCollector_1.ByteArrayCollector((size) => new Uint8Array(size))];
    let mode = -1;
    const pull = async (controller) => {
        const { value, done } = await reader.read();
        const chunk = value;
        if (done) {
            // Source exhausted: emit any buffered remainder, then close.
            if (mode !== -1) {
                const remainder = flush(buffers, mode);
                if (sizeOf(remainder) > 0) {
                    controller.enqueue(remainder);
                }
            }
            controller.close();
        }
        else {
            const chunkMode = modeOf(chunk, false);
            if (mode !== chunkMode) {
                // Chunk type changed: flush the previous mode's buffer first.
                if (mode >= 0) {
                    controller.enqueue(flush(buffers, mode));
                }
                mode = chunkMode;
            }
            if (mode === -1) {
                // Unknown chunk type: forward unbuffered.
                controller.enqueue(chunk);
                return;
            }
            const chunkSize = sizeOf(chunk);
            bytesSeen += chunkSize;
            const bufferSize = sizeOf(buffers[mode]);
            if (chunkSize >= size && bufferSize === 0) {
                // Chunk already meets the threshold and nothing is pending: pass through.
                controller.enqueue(chunk);
            }
            else {
                const newSize = merge(buffers, mode, chunk);
                if (!streamBufferingLoggedWarning && bytesSeen > size * 2) {
                    streamBufferingLoggedWarning = true;
                    logger?.warn(`@smithy/util-stream - stream chunk size ${chunkSize} is below threshold of ${size}, automatically buffering.`);
                }
                if (newSize >= size) {
                    controller.enqueue(flush(buffers, mode));
                }
                else {
                    // Not enough buffered yet: keep reading before satisfying this pull.
                    await pull(controller);
                }
            }
        }
    };
    return new ReadableStream({
        pull,
    });
}
|
||||
exports.createBufferedReadable = createBufferedReadableStream;
|
||||
/**
 * Appends a chunk to the accumulation buffer selected by `mode` and returns
 * that buffer's new size (bytes, or characters for the string buffer).
 *
 * Modes: 0 = string buffer, 1/2 = ByteArrayCollector instances.
 * @throws when `mode` is not a known buffer index (matches flush()'s behavior;
 *         previously this fell through and returned undefined silently).
 */
function merge(buffers, mode, chunk) {
    switch (mode) {
        case 0:
            buffers[0] += chunk;
            return sizeOf(buffers[0]);
        case 1:
        case 2:
            buffers[mode].push(chunk);
            return sizeOf(buffers[mode]);
        default:
            throw new Error(`@smithy/util-stream - invalid index ${mode} given to merge()`);
    }
}
|
||||
/**
 * Drains the buffer selected by `mode` and returns its accumulated contents,
 * leaving that buffer empty for further accumulation.
 * @throws for an unrecognized mode index.
 */
function flush(buffers, mode) {
    if (mode === 0) {
        const pending = buffers[0];
        buffers[0] = "";
        return pending;
    }
    if (mode === 1 || mode === 2) {
        return buffers[mode].flush();
    }
    throw new Error(`@smithy/util-stream - invalid index ${mode} given to flush()`);
}
|
||||
/**
 * Size of a chunk: byte length when available, string length otherwise,
 * and 0 for null/undefined or unmeasurable values.
 */
function sizeOf(chunk) {
    if (chunk == null) {
        return 0;
    }
    return chunk.byteLength ?? chunk.length ?? 0;
}
|
||||
/**
 * Classifies a chunk for buffer selection:
 * 2 = Node Buffer (only when allowBuffer), 1 = Uint8Array, 0 = string,
 * -1 = anything else (passed through unbuffered).
 */
function modeOf(chunk, allowBuffer = true) {
    const bufferAvailable = typeof Buffer !== "undefined";
    if (allowBuffer && bufferAvailable && chunk instanceof Buffer) {
        return 2;
    }
    if (chunk instanceof Uint8Array) {
        return 1;
    }
    return typeof chunk === "string" ? 0 : -1;
}
|
||||
30
extracted-source/node_modules/@smithy/util-stream/dist-cjs/getAwsChunkedEncodingStream.js
generated
vendored
Normal file
30
extracted-source/node_modules/@smithy/util-stream/dist-cjs/getAwsChunkedEncodingStream.js
generated
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getAwsChunkedEncodingStream = void 0;
|
||||
const stream_1 = require("stream");
/**
 * Converts a Node Readable into an aws-chunked encoded stream: each data
 * chunk is framed as `<hex length>\r\n<data>\r\n`, terminated by a
 * zero-length chunk and, when all checksum options are provided, a trailing
 * `<location>:<checksum>` line.
 */
const getAwsChunkedEncodingStream = (readableStream, options) => {
    const { base64Encoder, bodyLengthChecker, checksumAlgorithmFn, checksumLocationName, streamHasher } = options;
    // The trailing checksum is only emitted when every related option is set.
    const checksumRequired = base64Encoder !== undefined &&
        checksumAlgorithmFn !== undefined &&
        checksumLocationName !== undefined &&
        streamHasher !== undefined;
    // Start hashing the source up front so the digest is ready at "end".
    const digest = checksumRequired ? streamHasher(checksumAlgorithmFn, readableStream) : undefined;
    const awsChunkedEncodingStream = new stream_1.Readable({ read: () => { } });
    readableStream.on("data", (data) => {
        const length = bodyLengthChecker(data) || 0;
        awsChunkedEncodingStream.push(`${length.toString(16)}\r\n`);
        awsChunkedEncodingStream.push(data);
        awsChunkedEncodingStream.push("\r\n");
    });
    readableStream.on("end", async () => {
        // Zero-length chunk marks the end of the chunked body.
        awsChunkedEncodingStream.push(`0\r\n`);
        if (checksumRequired) {
            const checksum = base64Encoder(await digest);
            awsChunkedEncodingStream.push(`${checksumLocationName}:${checksum}\r\n`);
            awsChunkedEncodingStream.push(`\r\n`);
        }
        awsChunkedEncodingStream.push(null);
    });
    return awsChunkedEncodingStream;
};
exports.getAwsChunkedEncodingStream = getAwsChunkedEncodingStream;
|
||||
34
extracted-source/node_modules/@smithy/util-stream/dist-cjs/headStream.browser.js
generated
vendored
Normal file
34
extracted-source/node_modules/@smithy/util-stream/dist-cjs/headStream.browser.js
generated
vendored
Normal file
@@ -0,0 +1,34 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.headStream = headStream;
|
||||
/**
 * Reads at most `bytes` bytes from the head of a web ReadableStream and
 * returns them as a single Uint8Array. The reader's lock is released
 * afterwards; the stream itself is not cancelled.
 */
async function headStream(stream, bytes) {
    const reader = stream.getReader();
    const collectedChunks = [];
    let totalRead = 0;
    let finished = false;
    while (!finished) {
        const { done, value } = await reader.read();
        if (value) {
            collectedChunks.push(value);
            totalRead += value?.byteLength ?? 0;
        }
        if (totalRead >= bytes) {
            break;
        }
        finished = done;
    }
    reader.releaseLock();
    // Assemble the head, truncating the final chunk if it overshoots the limit.
    const head = new Uint8Array(Math.min(bytes, totalRead));
    let writeOffset = 0;
    for (const chunk of collectedChunks) {
        const remaining = head.byteLength - writeOffset;
        if (chunk.byteLength > remaining) {
            head.set(chunk.subarray(0, remaining), writeOffset);
            break;
        }
        head.set(chunk, writeOffset);
        writeOffset += chunk.length;
    }
    return head;
}
|
||||
42
extracted-source/node_modules/@smithy/util-stream/dist-cjs/headStream.js
generated
vendored
Normal file
42
extracted-source/node_modules/@smithy/util-stream/dist-cjs/headStream.js
generated
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.headStream = void 0;
|
||||
const stream_1 = require("stream");
const headStream_browser_1 = require("./headStream.browser");
const stream_type_check_1 = require("./stream-type-check");
/**
 * Reads at most `bytes` bytes from the head of a stream. Web ReadableStreams
 * are delegated to the browser implementation; Node streams are piped into a
 * byte-limited Writable collector.
 */
const headStream = (stream, bytes) => {
    if ((0, stream_type_check_1.isReadableStream)(stream)) {
        return (0, headStream_browser_1.headStream)(stream, bytes);
    }
    return new Promise((resolve, reject) => {
        const collector = new Collector();
        collector.limit = bytes;
        stream.pipe(collector);
        stream.on("error", (err) => {
            collector.end();
            reject(err);
        });
        collector.on("error", reject);
        collector.on("finish", function () {
            const bytes = new Uint8Array(Buffer.concat(this.buffers));
            resolve(bytes);
        });
    });
};
exports.headStream = headStream;
/**
 * Writable that buffers incoming chunks until `limit` bytes have been seen,
 * trimming the final chunk to the limit and emitting "finish" early.
 */
class Collector extends stream_1.Writable {
    buffers = [];
    limit = Infinity;
    bytesBuffered = 0;
    _write(chunk, encoding, callback) {
        this.buffers.push(chunk);
        this.bytesBuffered += chunk.byteLength ?? 0;
        if (this.bytesBuffered >= this.limit) {
            // Trim the excess off the last chunk and finish without waiting for end().
            const excess = this.bytesBuffered - this.limit;
            const tailBuffer = this.buffers[this.buffers.length - 1];
            this.buffers[this.buffers.length - 1] = tailBuffer.subarray(0, tailBuffer.byteLength - excess);
            this.emit("finish");
        }
        callback();
    }
}
|
||||
84
extracted-source/node_modules/@smithy/util-stream/dist-cjs/index.js
generated
vendored
Normal file
84
extracted-source/node_modules/@smithy/util-stream/dist-cjs/index.js
generated
vendored
Normal file
@@ -0,0 +1,84 @@
|
||||
'use strict';
|
||||
|
||||
var utilBase64 = require('@smithy/util-base64');
|
||||
var utilUtf8 = require('@smithy/util-utf8');
|
||||
var ChecksumStream = require('./checksum/ChecksumStream');
|
||||
var createChecksumStream = require('./checksum/createChecksumStream');
|
||||
var createBufferedReadable = require('./createBufferedReadable');
|
||||
var getAwsChunkedEncodingStream = require('./getAwsChunkedEncodingStream');
|
||||
var headStream = require('./headStream');
|
||||
var sdkStreamMixin = require('./sdk-stream-mixin');
|
||||
var splitStream = require('./splitStream');
|
||||
var streamTypeCheck = require('./stream-type-check');
|
||||
|
||||
/**
 * Uint8Array subclass that adds transformToString() so byte payloads can be
 * handled with the same convenience methods as SDK stream bodies.
 */
class Uint8ArrayBlobAdapter extends Uint8Array {
    /**
     * Decodes a string ("utf-8" by default, or "base64") into an adapter instance.
     * @throws for non-string input.
     */
    static fromString(source, encoding = "utf-8") {
        if (typeof source === "string") {
            if (encoding === "base64") {
                return Uint8ArrayBlobAdapter.mutate(utilBase64.fromBase64(source));
            }
            return Uint8ArrayBlobAdapter.mutate(utilUtf8.fromUtf8(source));
        }
        throw new Error(`Unsupported conversion from ${typeof source} to Uint8ArrayBlobAdapter.`);
    }
    /**
     * Rebrands an existing Uint8Array in place (no copy) as an adapter.
     */
    static mutate(source) {
        Object.setPrototypeOf(source, Uint8ArrayBlobAdapter.prototype);
        return source;
    }
    /**
     * Encodes the bytes as a string: "base64", or utf-8 for anything else.
     */
    transformToString(encoding = "utf-8") {
        if (encoding === "base64") {
            return utilBase64.toBase64(this);
        }
        return utilUtf8.toUtf8(this);
    }
}
|
||||
|
||||
exports.Uint8ArrayBlobAdapter = Uint8ArrayBlobAdapter;
// Re-export every named member of the aggregated submodules in order,
// preserving live bindings via getters and skipping 'default' plus any key
// already defined on this module's exports.
const reExportedModules = [
    ChecksumStream,
    createChecksumStream,
    createBufferedReadable,
    getAwsChunkedEncodingStream,
    headStream,
    sdkStreamMixin,
    splitStream,
    streamTypeCheck,
];
for (const moduleExports of reExportedModules) {
    Object.keys(moduleExports).forEach(function (k) {
        if (k !== 'default' && !Object.prototype.hasOwnProperty.call(exports, k)) Object.defineProperty(exports, k, {
            enumerable: true,
            get: function () { return moduleExports[k]; }
        });
    });
}
|
||||
68
extracted-source/node_modules/@smithy/util-stream/dist-cjs/sdk-stream-mixin.browser.js
generated
vendored
Normal file
68
extracted-source/node_modules/@smithy/util-stream/dist-cjs/sdk-stream-mixin.browser.js
generated
vendored
Normal file
@@ -0,0 +1,68 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.sdkStreamMixin = void 0;
|
||||
const fetch_http_handler_1 = require("@smithy/fetch-http-handler");
const util_base64_1 = require("@smithy/util-base64");
const util_hex_encoding_1 = require("@smithy/util-hex-encoding");
const util_utf8_1 = require("@smithy/util-utf8");
const stream_type_check_1 = require("./stream-type-check");
const ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED = "The stream has already been transformed.";
/**
 * Augments a Blob or web ReadableStream payload with one-shot transform
 * helpers (byte array / string / web stream). Each consuming transform may
 * run only once, since it drains the underlying data.
 */
const sdkStreamMixin = (stream) => {
    if (!isBlobInstance(stream) && !(0, stream_type_check_1.isReadableStream)(stream)) {
        const name = stream?.__proto__?.constructor?.name || stream;
        throw new Error(`Unexpected stream implementation, expect Blob or ReadableStream, got ${name}`);
    }
    // Guards the consume-only-once contract shared by the transforms below.
    let transformed = false;
    const transformToByteArray = async () => {
        if (transformed) {
            throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED);
        }
        transformed = true;
        return await (0, fetch_http_handler_1.streamCollector)(stream);
    };
    const blobToWebStream = (blob) => {
        if (typeof blob.stream !== "function") {
            throw new Error("Cannot transform payload Blob to web stream. Please make sure the Blob.stream() is polyfilled.\n" +
                "If you are using React Native, this API is not yet supported, see: https://react-native.canny.io/feature-requests/p/fetch-streaming-body");
        }
        return blob.stream();
    };
    return Object.assign(stream, {
        transformToByteArray: transformToByteArray,
        transformToString: async (encoding) => {
            const buf = await transformToByteArray();
            if (encoding === "base64") {
                return (0, util_base64_1.toBase64)(buf);
            }
            else if (encoding === "hex") {
                return (0, util_hex_encoding_1.toHex)(buf);
            }
            else if (encoding === undefined || encoding === "utf8" || encoding === "utf-8") {
                return (0, util_utf8_1.toUtf8)(buf);
            }
            else if (typeof TextDecoder === "function") {
                // Any other encoding is delegated to the platform TextDecoder.
                return new TextDecoder(encoding).decode(buf);
            }
            else {
                throw new Error("TextDecoder is not available, please make sure polyfill is provided.");
            }
        },
        transformToWebStream: () => {
            if (transformed) {
                throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED);
            }
            transformed = true;
            if (isBlobInstance(stream)) {
                return blobToWebStream(stream);
            }
            else if ((0, stream_type_check_1.isReadableStream)(stream)) {
                return stream;
            }
            else {
                throw new Error(`Cannot transform payload to web stream, got ${stream}`);
            }
        },
    });
};
exports.sdkStreamMixin = sdkStreamMixin;
// True when the platform provides Blob and `stream` is one.
const isBlobInstance = (stream) => typeof Blob === "function" && stream instanceof Blob;
|
||||
54
extracted-source/node_modules/@smithy/util-stream/dist-cjs/sdk-stream-mixin.js
generated
vendored
Normal file
54
extracted-source/node_modules/@smithy/util-stream/dist-cjs/sdk-stream-mixin.js
generated
vendored
Normal file
@@ -0,0 +1,54 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.sdkStreamMixin = void 0;
|
||||
const node_http_handler_1 = require("@smithy/node-http-handler");
const util_buffer_from_1 = require("@smithy/util-buffer-from");
const stream_1 = require("stream");
const sdk_stream_mixin_browser_1 = require("./sdk-stream-mixin.browser");
const ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED = "The stream has already been transformed.";
/**
 * Node variant of sdkStreamMixin: augments a stream.Readable with one-shot
 * transform helpers; non-Readable payloads fall back to the browser mixin.
 */
const sdkStreamMixin = (stream) => {
    if (!(stream instanceof stream_1.Readable)) {
        try {
            // May still be a Blob or web ReadableStream; let the browser mixin try.
            return (0, sdk_stream_mixin_browser_1.sdkStreamMixin)(stream);
        }
        catch (e) {
            const name = stream?.__proto__?.constructor?.name || stream;
            throw new Error(`Unexpected stream implementation, expect Stream.Readable instance, got ${name}`);
        }
    }
    // Each consuming transform may run only once; this flag enforces that.
    let transformed = false;
    const transformToByteArray = async () => {
        if (transformed) {
            throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED);
        }
        transformed = true;
        return await (0, node_http_handler_1.streamCollector)(stream);
    };
    return Object.assign(stream, {
        transformToByteArray,
        transformToString: async (encoding) => {
            const buf = await transformToByteArray();
            if (encoding === undefined || Buffer.isEncoding(encoding)) {
                // Node-native encodings go through Buffer without an extra copy.
                return (0, util_buffer_from_1.fromArrayBuffer)(buf.buffer, buf.byteOffset, buf.byteLength).toString(encoding);
            }
            else {
                const decoder = new TextDecoder(encoding);
                return decoder.decode(buf);
            }
        },
        transformToWebStream: () => {
            if (transformed) {
                throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED);
            }
            if (stream.readableFlowing !== null) {
                // A flowing stream already has data handlers attached elsewhere.
                throw new Error("The stream has been consumed by other callbacks.");
            }
            if (typeof stream_1.Readable.toWeb !== "function") {
                throw new Error("Readable.toWeb() is not supported. Please ensure a polyfill is available.");
            }
            transformed = true;
            return stream_1.Readable.toWeb(stream);
        },
    });
};
exports.sdkStreamMixin = sdkStreamMixin;
|
||||
10
extracted-source/node_modules/@smithy/util-stream/dist-cjs/splitStream.browser.js
generated
vendored
Normal file
10
extracted-source/node_modules/@smithy/util-stream/dist-cjs/splitStream.browser.js
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.splitStream = splitStream;
|
||||
/**
 * Splits a web stream (or Blob) into two identical branches via tee().
 * A Blob is first converted to a ReadableStream with Blob#stream().
 */
async function splitStream(stream) {
    const readableStream = typeof stream.stream === "function" ? stream.stream() : stream;
    return readableStream.tee();
}
|
||||
16
extracted-source/node_modules/@smithy/util-stream/dist-cjs/splitStream.js
generated
vendored
Normal file
16
extracted-source/node_modules/@smithy/util-stream/dist-cjs/splitStream.js
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.splitStream = splitStream;
|
||||
const stream_1 = require("stream");
const splitStream_browser_1 = require("./splitStream.browser");
const stream_type_check_1 = require("./stream-type-check");
/**
 * Splits a stream into two independent branches. Web ReadableStreams and
 * Blobs use tee() via the browser implementation; Node streams are piped
 * into two PassThrough streams.
 */
async function splitStream(stream) {
    if ((0, stream_type_check_1.isReadableStream)(stream) || (0, stream_type_check_1.isBlob)(stream)) {
        return (0, splitStream_browser_1.splitStream)(stream);
    }
    const stream1 = new stream_1.PassThrough();
    const stream2 = new stream_1.PassThrough();
    stream.pipe(stream1);
    stream.pipe(stream2);
    return [stream1, stream2];
}
|
||||
10
extracted-source/node_modules/@smithy/util-stream/dist-cjs/stream-type-check.js
generated
vendored
Normal file
10
extracted-source/node_modules/@smithy/util-stream/dist-cjs/stream-type-check.js
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.isBlob = exports.isReadableStream = void 0;
|
||||
/**
 * Checks for a web ReadableStream; the constructor-name comparison also
 * accepts streams constructed in other realms or by polyfills.
 */
const isReadableStream = (stream) => {
    if (typeof ReadableStream !== "function") {
        return false;
    }
    return stream?.constructor?.name === ReadableStream.name || stream instanceof ReadableStream;
};
|
||||
exports.isReadableStream = isReadableStream;
|
||||
/**
 * Checks for a Blob; the constructor-name comparison also accepts Blobs
 * constructed in other realms or by polyfills.
 */
const isBlob = (blob) => {
    if (typeof Blob !== "function") {
        return false;
    }
    return blob?.constructor?.name === Blob.name || blob instanceof Blob;
};
|
||||
exports.isBlob = isBlob;
|
||||
216
extracted-source/node_modules/@smithy/util-stream/node_modules/@smithy/fetch-http-handler/dist-cjs/index.js
generated
vendored
Normal file
216
extracted-source/node_modules/@smithy/util-stream/node_modules/@smithy/fetch-http-handler/dist-cjs/index.js
generated
vendored
Normal file
@@ -0,0 +1,216 @@
|
||||
'use strict';
|
||||
|
||||
var protocolHttp = require('@smithy/protocol-http');
|
||||
var querystringBuilder = require('@smithy/querystring-builder');
|
||||
var utilBase64 = require('@smithy/util-base64');
|
||||
|
||||
/**
 * Builds a Fetch API Request from a URL and optional request options.
 */
function createRequest(url, requestOptions) {
    const request = new Request(url, requestOptions);
    return request;
}
|
||||
|
||||
/**
 * Returns a promise that rejects with a TimeoutError after `timeoutInMs`
 * milliseconds. With a falsy timeout (the default 0) the promise never
 * settles — callers are presumably expected to race it against the actual
 * request promise.
 */
function requestTimeout(timeoutInMs = 0) {
    return new Promise((resolve, reject) => {
        if (!timeoutInMs) {
            return;
        }
        setTimeout(() => {
            const timeoutError = new Error(`Request did not complete within ${timeoutInMs} ms`);
            timeoutError.name = "TimeoutError";
            reject(timeoutError);
        }, timeoutInMs);
    });
}
|
||||
|
||||
const keepAliveSupport = {
|
||||
supported: undefined,
|
||||
};
|
||||
class FetchHttpHandler {
|
||||
config;
|
||||
configProvider;
|
||||
static create(instanceOrOptions) {
|
||||
if (typeof instanceOrOptions?.handle === "function") {
|
||||
return instanceOrOptions;
|
||||
}
|
||||
return new FetchHttpHandler(instanceOrOptions);
|
||||
}
|
||||
constructor(options) {
|
||||
if (typeof options === "function") {
|
||||
this.configProvider = options().then((opts) => opts || {});
|
||||
}
|
||||
else {
|
||||
this.config = options ?? {};
|
||||
this.configProvider = Promise.resolve(this.config);
|
||||
}
|
||||
if (keepAliveSupport.supported === undefined) {
|
||||
keepAliveSupport.supported = Boolean(typeof Request !== "undefined" && "keepalive" in createRequest("https://[::1]"));
|
||||
}
|
||||
}
|
||||
destroy() {
|
||||
}
|
||||
async handle(request, { abortSignal, requestTimeout: requestTimeout$1 } = {}) {
|
||||
if (!this.config) {
|
||||
this.config = await this.configProvider;
|
||||
}
|
||||
const requestTimeoutInMs = requestTimeout$1 ?? this.config.requestTimeout;
|
||||
const keepAlive = this.config.keepAlive === true;
|
||||
const credentials = this.config.credentials;
|
||||
if (abortSignal?.aborted) {
|
||||
const abortError = new Error("Request aborted");
|
||||
abortError.name = "AbortError";
|
||||
return Promise.reject(abortError);
|
||||
}
|
||||
let path = request.path;
|
||||
const queryString = querystringBuilder.buildQueryString(request.query || {});
|
||||
if (queryString) {
|
||||
path += `?${queryString}`;
|
||||
}
|
||||
if (request.fragment) {
|
||||
path += `#${request.fragment}`;
|
||||
}
|
||||
let auth = "";
|
||||
if (request.username != null || request.password != null) {
|
||||
const username = request.username ?? "";
|
||||
const password = request.password ?? "";
|
||||
auth = `${username}:${password}@`;
|
||||
}
|
||||
const { port, method } = request;
|
||||
const url = `${request.protocol}//${auth}${request.hostname}${port ? `:${port}` : ""}${path}`;
|
||||
const body = method === "GET" || method === "HEAD" ? undefined : request.body;
|
||||
const requestOptions = {
|
||||
body,
|
||||
headers: new Headers(request.headers),
|
||||
method: method,
|
||||
credentials,
|
||||
};
|
||||
if (this.config?.cache) {
|
||||
requestOptions.cache = this.config.cache;
|
||||
}
|
||||
if (body) {
|
||||
requestOptions.duplex = "half";
|
||||
}
|
||||
if (typeof AbortController !== "undefined") {
|
||||
requestOptions.signal = abortSignal;
|
||||
}
|
||||
if (keepAliveSupport.supported) {
|
||||
requestOptions.keepalive = keepAlive;
|
||||
}
|
||||
if (typeof this.config.requestInit === "function") {
|
||||
Object.assign(requestOptions, this.config.requestInit(request));
|
||||
}
|
||||
let removeSignalEventListener = () => { };
|
||||
const fetchRequest = createRequest(url, requestOptions);
|
||||
const raceOfPromises = [
|
||||
fetch(fetchRequest).then((response) => {
|
||||
const fetchHeaders = response.headers;
|
||||
const transformedHeaders = {};
|
||||
for (const pair of fetchHeaders.entries()) {
|
||||
transformedHeaders[pair[0]] = pair[1];
|
||||
}
|
||||
const hasReadableStream = response.body != undefined;
|
||||
if (!hasReadableStream) {
|
||||
return response.blob().then((body) => ({
|
||||
response: new protocolHttp.HttpResponse({
|
||||
headers: transformedHeaders,
|
||||
reason: response.statusText,
|
||||
statusCode: response.status,
|
||||
body,
|
||||
}),
|
||||
}));
|
||||
}
|
||||
return {
|
||||
response: new protocolHttp.HttpResponse({
|
||||
headers: transformedHeaders,
|
||||
reason: response.statusText,
|
||||
statusCode: response.status,
|
||||
body: response.body,
|
||||
}),
|
||||
};
|
||||
}),
|
||||
requestTimeout(requestTimeoutInMs),
|
||||
];
|
||||
if (abortSignal) {
|
||||
raceOfPromises.push(new Promise((resolve, reject) => {
|
||||
const onAbort = () => {
|
||||
const abortError = new Error("Request aborted");
|
||||
abortError.name = "AbortError";
|
||||
reject(abortError);
|
||||
};
|
||||
if (typeof abortSignal.addEventListener === "function") {
|
||||
const signal = abortSignal;
|
||||
signal.addEventListener("abort", onAbort, { once: true });
|
||||
removeSignalEventListener = () => signal.removeEventListener("abort", onAbort);
|
||||
}
|
||||
else {
|
||||
abortSignal.onabort = onAbort;
|
||||
}
|
||||
}));
|
||||
}
|
||||
return Promise.race(raceOfPromises).finally(removeSignalEventListener);
|
||||
}
|
||||
updateHttpClientConfig(key, value) {
|
||||
this.config = undefined;
|
||||
this.configProvider = this.configProvider.then((config) => {
|
||||
config[key] = value;
|
||||
return config;
|
||||
});
|
||||
}
|
||||
httpHandlerConfigs() {
|
||||
return this.config ?? {};
|
||||
}
|
||||
}
|
||||
|
||||
const streamCollector = async (stream) => {
|
||||
if ((typeof Blob === "function" && stream instanceof Blob) || stream.constructor?.name === "Blob") {
|
||||
if (Blob.prototype.arrayBuffer !== undefined) {
|
||||
return new Uint8Array(await stream.arrayBuffer());
|
||||
}
|
||||
return collectBlob(stream);
|
||||
}
|
||||
return collectStream(stream);
|
||||
};
|
||||
async function collectBlob(blob) {
|
||||
const base64 = await readToBase64(blob);
|
||||
const arrayBuffer = utilBase64.fromBase64(base64);
|
||||
return new Uint8Array(arrayBuffer);
|
||||
}
|
||||
async function collectStream(stream) {
|
||||
const chunks = [];
|
||||
const reader = stream.getReader();
|
||||
let isDone = false;
|
||||
let length = 0;
|
||||
while (!isDone) {
|
||||
const { done, value } = await reader.read();
|
||||
if (value) {
|
||||
chunks.push(value);
|
||||
length += value.length;
|
||||
}
|
||||
isDone = done;
|
||||
}
|
||||
const collected = new Uint8Array(length);
|
||||
let offset = 0;
|
||||
for (const chunk of chunks) {
|
||||
collected.set(chunk, offset);
|
||||
offset += chunk.length;
|
||||
}
|
||||
return collected;
|
||||
}
|
||||
function readToBase64(blob) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const reader = new FileReader();
|
||||
reader.onloadend = () => {
|
||||
if (reader.readyState !== 2) {
|
||||
return reject(new Error("Reader aborted too early"));
|
||||
}
|
||||
const result = (reader.result ?? "");
|
||||
const commaIndex = result.indexOf(",");
|
||||
const dataOffset = commaIndex > -1 ? commaIndex + 1 : result.length;
|
||||
resolve(result.substring(dataOffset));
|
||||
};
|
||||
reader.onabort = () => reject(new Error("Read aborted"));
|
||||
reader.onerror = () => reject(reader.error);
|
||||
reader.readAsDataURL(blob);
|
||||
});
|
||||
}
|
||||
|
||||
exports.FetchHttpHandler = FetchHttpHandler;
|
||||
exports.keepAliveSupport = keepAliveSupport;
|
||||
exports.streamCollector = streamCollector;
|
||||
@@ -0,0 +1,169 @@
|
||||
'use strict';
|
||||
|
||||
var types = require('@smithy/types');
|
||||
|
||||
const getHttpHandlerExtensionConfiguration = (runtimeConfig) => {
|
||||
return {
|
||||
setHttpHandler(handler) {
|
||||
runtimeConfig.httpHandler = handler;
|
||||
},
|
||||
httpHandler() {
|
||||
return runtimeConfig.httpHandler;
|
||||
},
|
||||
updateHttpClientConfig(key, value) {
|
||||
runtimeConfig.httpHandler?.updateHttpClientConfig(key, value);
|
||||
},
|
||||
httpHandlerConfigs() {
|
||||
return runtimeConfig.httpHandler.httpHandlerConfigs();
|
||||
},
|
||||
};
|
||||
};
|
||||
const resolveHttpHandlerRuntimeConfig = (httpHandlerExtensionConfiguration) => {
|
||||
return {
|
||||
httpHandler: httpHandlerExtensionConfiguration.httpHandler(),
|
||||
};
|
||||
};
|
||||
|
||||
class Field {
|
||||
name;
|
||||
kind;
|
||||
values;
|
||||
constructor({ name, kind = types.FieldPosition.HEADER, values = [] }) {
|
||||
this.name = name;
|
||||
this.kind = kind;
|
||||
this.values = values;
|
||||
}
|
||||
add(value) {
|
||||
this.values.push(value);
|
||||
}
|
||||
set(values) {
|
||||
this.values = values;
|
||||
}
|
||||
remove(value) {
|
||||
this.values = this.values.filter((v) => v !== value);
|
||||
}
|
||||
toString() {
|
||||
return this.values.map((v) => (v.includes(",") || v.includes(" ") ? `"${v}"` : v)).join(", ");
|
||||
}
|
||||
get() {
|
||||
return this.values;
|
||||
}
|
||||
}
|
||||
|
||||
class Fields {
|
||||
entries = {};
|
||||
encoding;
|
||||
constructor({ fields = [], encoding = "utf-8" }) {
|
||||
fields.forEach(this.setField.bind(this));
|
||||
this.encoding = encoding;
|
||||
}
|
||||
setField(field) {
|
||||
this.entries[field.name.toLowerCase()] = field;
|
||||
}
|
||||
getField(name) {
|
||||
return this.entries[name.toLowerCase()];
|
||||
}
|
||||
removeField(name) {
|
||||
delete this.entries[name.toLowerCase()];
|
||||
}
|
||||
getByType(kind) {
|
||||
return Object.values(this.entries).filter((field) => field.kind === kind);
|
||||
}
|
||||
}
|
||||
|
||||
class HttpRequest {
|
||||
method;
|
||||
protocol;
|
||||
hostname;
|
||||
port;
|
||||
path;
|
||||
query;
|
||||
headers;
|
||||
username;
|
||||
password;
|
||||
fragment;
|
||||
body;
|
||||
constructor(options) {
|
||||
this.method = options.method || "GET";
|
||||
this.hostname = options.hostname || "localhost";
|
||||
this.port = options.port;
|
||||
this.query = options.query || {};
|
||||
this.headers = options.headers || {};
|
||||
this.body = options.body;
|
||||
this.protocol = options.protocol
|
||||
? options.protocol.slice(-1) !== ":"
|
||||
? `${options.protocol}:`
|
||||
: options.protocol
|
||||
: "https:";
|
||||
this.path = options.path ? (options.path.charAt(0) !== "/" ? `/${options.path}` : options.path) : "/";
|
||||
this.username = options.username;
|
||||
this.password = options.password;
|
||||
this.fragment = options.fragment;
|
||||
}
|
||||
static clone(request) {
|
||||
const cloned = new HttpRequest({
|
||||
...request,
|
||||
headers: { ...request.headers },
|
||||
});
|
||||
if (cloned.query) {
|
||||
cloned.query = cloneQuery(cloned.query);
|
||||
}
|
||||
return cloned;
|
||||
}
|
||||
static isInstance(request) {
|
||||
if (!request) {
|
||||
return false;
|
||||
}
|
||||
const req = request;
|
||||
return ("method" in req &&
|
||||
"protocol" in req &&
|
||||
"hostname" in req &&
|
||||
"path" in req &&
|
||||
typeof req["query"] === "object" &&
|
||||
typeof req["headers"] === "object");
|
||||
}
|
||||
clone() {
|
||||
return HttpRequest.clone(this);
|
||||
}
|
||||
}
|
||||
function cloneQuery(query) {
|
||||
return Object.keys(query).reduce((carry, paramName) => {
|
||||
const param = query[paramName];
|
||||
return {
|
||||
...carry,
|
||||
[paramName]: Array.isArray(param) ? [...param] : param,
|
||||
};
|
||||
}, {});
|
||||
}
|
||||
|
||||
class HttpResponse {
|
||||
statusCode;
|
||||
reason;
|
||||
headers;
|
||||
body;
|
||||
constructor(options) {
|
||||
this.statusCode = options.statusCode;
|
||||
this.reason = options.reason;
|
||||
this.headers = options.headers || {};
|
||||
this.body = options.body;
|
||||
}
|
||||
static isInstance(response) {
|
||||
if (!response)
|
||||
return false;
|
||||
const resp = response;
|
||||
return typeof resp.statusCode === "number" && typeof resp.headers === "object";
|
||||
}
|
||||
}
|
||||
|
||||
function isValidHostname(hostname) {
|
||||
const hostPattern = /^[a-z0-9][a-z0-9\.\-]*[a-z0-9]$/;
|
||||
return hostPattern.test(hostname);
|
||||
}
|
||||
|
||||
exports.Field = Field;
|
||||
exports.Fields = Fields;
|
||||
exports.HttpRequest = HttpRequest;
|
||||
exports.HttpResponse = HttpResponse;
|
||||
exports.getHttpHandlerExtensionConfiguration = getHttpHandlerExtensionConfiguration;
|
||||
exports.isValidHostname = isValidHostname;
|
||||
exports.resolveHttpHandlerRuntimeConfig = resolveHttpHandlerRuntimeConfig;
|
||||
@@ -0,0 +1,26 @@
|
||||
'use strict';
|
||||
|
||||
var utilUriEscape = require('@smithy/util-uri-escape');
|
||||
|
||||
function buildQueryString(query) {
|
||||
const parts = [];
|
||||
for (let key of Object.keys(query).sort()) {
|
||||
const value = query[key];
|
||||
key = utilUriEscape.escapeUri(key);
|
||||
if (Array.isArray(value)) {
|
||||
for (let i = 0, iLen = value.length; i < iLen; i++) {
|
||||
parts.push(`${key}=${utilUriEscape.escapeUri(value[i])}`);
|
||||
}
|
||||
}
|
||||
else {
|
||||
let qsEntry = key;
|
||||
if (value || typeof value === "string") {
|
||||
qsEntry += `=${utilUriEscape.escapeUri(value)}`;
|
||||
}
|
||||
parts.push(qsEntry);
|
||||
}
|
||||
}
|
||||
return parts.join("&");
|
||||
}
|
||||
|
||||
exports.buildQueryString = buildQueryString;
|
||||
@@ -0,0 +1,9 @@
|
||||
'use strict';
|
||||
|
||||
const escapeUri = (uri) => encodeURIComponent(uri).replace(/[!'()*]/g, hexEncode);
|
||||
const hexEncode = (c) => `%${c.charCodeAt(0).toString(16).toUpperCase()}`;
|
||||
|
||||
const escapeUriPath = (uri) => uri.split("/").map(escapeUri).join("/");
|
||||
|
||||
exports.escapeUri = escapeUri;
|
||||
exports.escapeUriPath = escapeUriPath;
|
||||
91
extracted-source/node_modules/@smithy/util-stream/node_modules/@smithy/types/dist-cjs/index.js
generated
vendored
Normal file
91
extracted-source/node_modules/@smithy/util-stream/node_modules/@smithy/types/dist-cjs/index.js
generated
vendored
Normal file
@@ -0,0 +1,91 @@
|
||||
'use strict';
|
||||
|
||||
exports.HttpAuthLocation = void 0;
|
||||
(function (HttpAuthLocation) {
|
||||
HttpAuthLocation["HEADER"] = "header";
|
||||
HttpAuthLocation["QUERY"] = "query";
|
||||
})(exports.HttpAuthLocation || (exports.HttpAuthLocation = {}));
|
||||
|
||||
exports.HttpApiKeyAuthLocation = void 0;
|
||||
(function (HttpApiKeyAuthLocation) {
|
||||
HttpApiKeyAuthLocation["HEADER"] = "header";
|
||||
HttpApiKeyAuthLocation["QUERY"] = "query";
|
||||
})(exports.HttpApiKeyAuthLocation || (exports.HttpApiKeyAuthLocation = {}));
|
||||
|
||||
exports.EndpointURLScheme = void 0;
|
||||
(function (EndpointURLScheme) {
|
||||
EndpointURLScheme["HTTP"] = "http";
|
||||
EndpointURLScheme["HTTPS"] = "https";
|
||||
})(exports.EndpointURLScheme || (exports.EndpointURLScheme = {}));
|
||||
|
||||
exports.AlgorithmId = void 0;
|
||||
(function (AlgorithmId) {
|
||||
AlgorithmId["MD5"] = "md5";
|
||||
AlgorithmId["CRC32"] = "crc32";
|
||||
AlgorithmId["CRC32C"] = "crc32c";
|
||||
AlgorithmId["SHA1"] = "sha1";
|
||||
AlgorithmId["SHA256"] = "sha256";
|
||||
})(exports.AlgorithmId || (exports.AlgorithmId = {}));
|
||||
const getChecksumConfiguration = (runtimeConfig) => {
|
||||
const checksumAlgorithms = [];
|
||||
if (runtimeConfig.sha256 !== undefined) {
|
||||
checksumAlgorithms.push({
|
||||
algorithmId: () => exports.AlgorithmId.SHA256,
|
||||
checksumConstructor: () => runtimeConfig.sha256,
|
||||
});
|
||||
}
|
||||
if (runtimeConfig.md5 != undefined) {
|
||||
checksumAlgorithms.push({
|
||||
algorithmId: () => exports.AlgorithmId.MD5,
|
||||
checksumConstructor: () => runtimeConfig.md5,
|
||||
});
|
||||
}
|
||||
return {
|
||||
addChecksumAlgorithm(algo) {
|
||||
checksumAlgorithms.push(algo);
|
||||
},
|
||||
checksumAlgorithms() {
|
||||
return checksumAlgorithms;
|
||||
},
|
||||
};
|
||||
};
|
||||
const resolveChecksumRuntimeConfig = (clientConfig) => {
|
||||
const runtimeConfig = {};
|
||||
clientConfig.checksumAlgorithms().forEach((checksumAlgorithm) => {
|
||||
runtimeConfig[checksumAlgorithm.algorithmId()] = checksumAlgorithm.checksumConstructor();
|
||||
});
|
||||
return runtimeConfig;
|
||||
};
|
||||
|
||||
const getDefaultClientConfiguration = (runtimeConfig) => {
|
||||
return getChecksumConfiguration(runtimeConfig);
|
||||
};
|
||||
const resolveDefaultRuntimeConfig = (config) => {
|
||||
return resolveChecksumRuntimeConfig(config);
|
||||
};
|
||||
|
||||
exports.FieldPosition = void 0;
|
||||
(function (FieldPosition) {
|
||||
FieldPosition[FieldPosition["HEADER"] = 0] = "HEADER";
|
||||
FieldPosition[FieldPosition["TRAILER"] = 1] = "TRAILER";
|
||||
})(exports.FieldPosition || (exports.FieldPosition = {}));
|
||||
|
||||
const SMITHY_CONTEXT_KEY = "__smithy_context";
|
||||
|
||||
exports.IniSectionType = void 0;
|
||||
(function (IniSectionType) {
|
||||
IniSectionType["PROFILE"] = "profile";
|
||||
IniSectionType["SSO_SESSION"] = "sso-session";
|
||||
IniSectionType["SERVICES"] = "services";
|
||||
})(exports.IniSectionType || (exports.IniSectionType = {}));
|
||||
|
||||
exports.RequestHandlerProtocol = void 0;
|
||||
(function (RequestHandlerProtocol) {
|
||||
RequestHandlerProtocol["HTTP_0_9"] = "http/0.9";
|
||||
RequestHandlerProtocol["HTTP_1_0"] = "http/1.0";
|
||||
RequestHandlerProtocol["TDS_8_0"] = "tds/8.0";
|
||||
})(exports.RequestHandlerProtocol || (exports.RequestHandlerProtocol = {}));
|
||||
|
||||
exports.SMITHY_CONTEXT_KEY = SMITHY_CONTEXT_KEY;
|
||||
exports.getDefaultClientConfiguration = getDefaultClientConfiguration;
|
||||
exports.resolveDefaultRuntimeConfig = resolveDefaultRuntimeConfig;
|
||||
16
extracted-source/node_modules/@smithy/util-stream/node_modules/@smithy/util-base64/dist-cjs/fromBase64.js
generated
vendored
Normal file
16
extracted-source/node_modules/@smithy/util-stream/node_modules/@smithy/util-base64/dist-cjs/fromBase64.js
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.fromBase64 = void 0;
|
||||
const util_buffer_from_1 = require("@smithy/util-buffer-from");
|
||||
const BASE64_REGEX = /^[A-Za-z0-9+/]*={0,2}$/;
|
||||
const fromBase64 = (input) => {
|
||||
if ((input.length * 3) % 4 !== 0) {
|
||||
throw new TypeError(`Incorrect padding on base64 string.`);
|
||||
}
|
||||
if (!BASE64_REGEX.exec(input)) {
|
||||
throw new TypeError(`Invalid base64 string.`);
|
||||
}
|
||||
const buffer = (0, util_buffer_from_1.fromString)(input, "base64");
|
||||
return new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength);
|
||||
};
|
||||
exports.fromBase64 = fromBase64;
|
||||
19
extracted-source/node_modules/@smithy/util-stream/node_modules/@smithy/util-base64/dist-cjs/index.js
generated
vendored
Normal file
19
extracted-source/node_modules/@smithy/util-stream/node_modules/@smithy/util-base64/dist-cjs/index.js
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
'use strict';
|
||||
|
||||
var fromBase64 = require('./fromBase64');
|
||||
var toBase64 = require('./toBase64');
|
||||
|
||||
|
||||
|
||||
Object.keys(fromBase64).forEach(function (k) {
|
||||
if (k !== 'default' && !Object.prototype.hasOwnProperty.call(exports, k)) Object.defineProperty(exports, k, {
|
||||
enumerable: true,
|
||||
get: function () { return fromBase64[k]; }
|
||||
});
|
||||
});
|
||||
Object.keys(toBase64).forEach(function (k) {
|
||||
if (k !== 'default' && !Object.prototype.hasOwnProperty.call(exports, k)) Object.defineProperty(exports, k, {
|
||||
enumerable: true,
|
||||
get: function () { return toBase64[k]; }
|
||||
});
|
||||
});
|
||||
19
extracted-source/node_modules/@smithy/util-stream/node_modules/@smithy/util-base64/dist-cjs/toBase64.js
generated
vendored
Normal file
19
extracted-source/node_modules/@smithy/util-stream/node_modules/@smithy/util-base64/dist-cjs/toBase64.js
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.toBase64 = void 0;
|
||||
const util_buffer_from_1 = require("@smithy/util-buffer-from");
|
||||
const util_utf8_1 = require("@smithy/util-utf8");
|
||||
const toBase64 = (_input) => {
|
||||
let input;
|
||||
if (typeof _input === "string") {
|
||||
input = (0, util_utf8_1.fromUtf8)(_input);
|
||||
}
|
||||
else {
|
||||
input = _input;
|
||||
}
|
||||
if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") {
|
||||
throw new Error("@smithy/util-base64: toBase64 encoder function only accepts string | Uint8Array.");
|
||||
}
|
||||
return (0, util_buffer_from_1.fromArrayBuffer)(input.buffer, input.byteOffset, input.byteLength).toString("base64");
|
||||
};
|
||||
exports.toBase64 = toBase64;
|
||||
20
extracted-source/node_modules/@smithy/util-stream/node_modules/@smithy/util-buffer-from/dist-cjs/index.js
generated
vendored
Normal file
20
extracted-source/node_modules/@smithy/util-stream/node_modules/@smithy/util-buffer-from/dist-cjs/index.js
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
'use strict';
|
||||
|
||||
var isArrayBuffer = require('@smithy/is-array-buffer');
|
||||
var buffer = require('buffer');
|
||||
|
||||
const fromArrayBuffer = (input, offset = 0, length = input.byteLength - offset) => {
|
||||
if (!isArrayBuffer.isArrayBuffer(input)) {
|
||||
throw new TypeError(`The "input" argument must be ArrayBuffer. Received type ${typeof input} (${input})`);
|
||||
}
|
||||
return buffer.Buffer.from(input, offset, length);
|
||||
};
|
||||
const fromString = (input, encoding) => {
|
||||
if (typeof input !== "string") {
|
||||
throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`);
|
||||
}
|
||||
return encoding ? buffer.Buffer.from(input, encoding) : buffer.Buffer.from(input);
|
||||
};
|
||||
|
||||
exports.fromArrayBuffer = fromArrayBuffer;
|
||||
exports.fromString = fromString;
|
||||
@@ -0,0 +1,6 @@
|
||||
'use strict';
|
||||
|
||||
const isArrayBuffer = (arg) => (typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer) ||
|
||||
Object.prototype.toString.call(arg) === "[object ArrayBuffer]";
|
||||
|
||||
exports.isArrayBuffer = isArrayBuffer;
|
||||
38
extracted-source/node_modules/@smithy/util-stream/node_modules/@smithy/util-hex-encoding/dist-cjs/index.js
generated
vendored
Normal file
38
extracted-source/node_modules/@smithy/util-stream/node_modules/@smithy/util-hex-encoding/dist-cjs/index.js
generated
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
'use strict';
|
||||
|
||||
const SHORT_TO_HEX = {};
|
||||
const HEX_TO_SHORT = {};
|
||||
for (let i = 0; i < 256; i++) {
|
||||
let encodedByte = i.toString(16).toLowerCase();
|
||||
if (encodedByte.length === 1) {
|
||||
encodedByte = `0${encodedByte}`;
|
||||
}
|
||||
SHORT_TO_HEX[i] = encodedByte;
|
||||
HEX_TO_SHORT[encodedByte] = i;
|
||||
}
|
||||
function fromHex(encoded) {
|
||||
if (encoded.length % 2 !== 0) {
|
||||
throw new Error("Hex encoded strings must have an even number length");
|
||||
}
|
||||
const out = new Uint8Array(encoded.length / 2);
|
||||
for (let i = 0; i < encoded.length; i += 2) {
|
||||
const encodedByte = encoded.slice(i, i + 2).toLowerCase();
|
||||
if (encodedByte in HEX_TO_SHORT) {
|
||||
out[i / 2] = HEX_TO_SHORT[encodedByte];
|
||||
}
|
||||
else {
|
||||
throw new Error(`Cannot decode unrecognized sequence ${encodedByte} as hexadecimal`);
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
function toHex(bytes) {
|
||||
let out = "";
|
||||
for (let i = 0; i < bytes.byteLength; i++) {
|
||||
out += SHORT_TO_HEX[bytes[i]];
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
exports.fromHex = fromHex;
|
||||
exports.toHex = toHex;
|
||||
Reference in New Issue
Block a user