mirror of
https://github.com/tvytlx/ai-agent-deep-dive.git
synced 2026-04-21 05:05:08 +08:00
Add extracted source directory and README navigation
This commit is contained in:
15
extracted-source/node_modules/@anthropic-ai/sdk/internal/constants.mjs
generated
vendored
Normal file
15
extracted-source/node_modules/@anthropic-ai/sdk/internal/constants.mjs
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
// File containing shared constants
|
||||
/**
 * Model-specific timeout constraints for non-streaming requests, keyed by
 * every alias (first-party, Bedrock `anthropic.` prefix, Vertex `@` form)
 * of the Claude Opus 4 family.
 */
export const MODEL_NONSTREAMING_TOKENS = Object.fromEntries([
    'claude-opus-4-20250514',
    'claude-opus-4-0',
    'claude-4-opus-20250514',
    'anthropic.claude-opus-4-20250514-v1:0',
    'claude-opus-4@20250514',
    'claude-opus-4-1-20250805',
    'anthropic.claude-opus-4-1-20250805-v1:0',
    'claude-opus-4-1@20250805',
].map((model) => [model, 8192]));
|
||||
//# sourceMappingURL=constants.mjs.map
|
||||
35
extracted-source/node_modules/@anthropic-ai/sdk/internal/decoders/jsonl.mjs
generated
vendored
Normal file
35
extracted-source/node_modules/@anthropic-ai/sdk/internal/decoders/jsonl.mjs
generated
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
import { AnthropicError } from "../../core/error.mjs";
|
||||
import { ReadableStreamToAsyncIterable } from "../shims.mjs";
|
||||
import { LineDecoder } from "./line.mjs";
|
||||
/**
 * Incrementally decodes a stream of newline-delimited JSON (JSONL) chunks
 * into parsed values, buffering partial lines across chunk boundaries.
 */
export class JSONLDecoder {
    constructor(iterator, controller) {
        this.iterator = iterator;
        this.controller = controller;
    }
    async *decoder() {
        // LineDecoder accumulates partial lines across chunks.
        const lines = new LineDecoder();
        for await (const chunk of this.iterator) {
            for (const line of lines.decode(chunk)) {
                yield JSON.parse(line);
            }
        }
        // Drain anything still buffered once the upstream iterator ends.
        for (const line of lines.flush()) {
            yield JSON.parse(line);
        }
    }
    [Symbol.asyncIterator]() {
        return this.decoder();
    }
    static fromResponse(response, controller) {
        if (response.body) {
            return new JSONLDecoder(ReadableStreamToAsyncIterable(response.body), controller);
        }
        // A missing body is unrecoverable: abort and raise a targeted error.
        controller.abort();
        const isReactNative = typeof globalThis.navigator !== 'undefined' &&
            globalThis.navigator.product === 'ReactNative';
        if (isReactNative) {
            throw new AnthropicError(`The default react-native fetch implementation does not support streaming. Please use expo/fetch: https://docs.expo.dev/versions/latest/sdk/expo/#expofetch-api`);
        }
        throw new AnthropicError(`Attempted to iterate over a response with no body`);
    }
}
|
||||
//# sourceMappingURL=jsonl.mjs.map
|
||||
108
extracted-source/node_modules/@anthropic-ai/sdk/internal/decoders/line.mjs
generated
vendored
Normal file
108
extracted-source/node_modules/@anthropic-ai/sdk/internal/decoders/line.mjs
generated
vendored
Normal file
@@ -0,0 +1,108 @@
|
||||
// Backing stores for LineDecoder's emulated private fields (#buffer /
// #carriageReturnIndex); this file appears to be TypeScript output downleveled
// to WeakMap-based private-field emulation — see the tslib helpers it imports.
var _LineDecoder_buffer, _LineDecoder_carriageReturnIndex;
|
||||
import { __classPrivateFieldGet, __classPrivateFieldSet } from "../tslib.mjs";
|
||||
import { concatBytes, decodeUTF8, encodeUTF8 } from "../utils/bytes.mjs";
|
||||
/**
|
||||
* A re-implementation of httpx's `LineDecoder` in Python that handles incrementally
|
||||
* reading lines from text.
|
||||
*
|
||||
* https://github.com/encode/httpx/blob/920333ea98118e9cf617f246905d7b202510941c/httpx/_decoders.py#L258
|
||||
*/
|
||||
export class LineDecoder {
    constructor() {
        // Register this instance in the WeakMaps backing the emulated private
        // fields, then initialize: an empty byte buffer and no pending `\r`.
        _LineDecoder_buffer.set(this, void 0);
        _LineDecoder_carriageReturnIndex.set(this, void 0);
        __classPrivateFieldSet(this, _LineDecoder_buffer, new Uint8Array(), "f");
        __classPrivateFieldSet(this, _LineDecoder_carriageReturnIndex, null, "f");
    }
    // Feed one chunk (Uint8Array | ArrayBuffer | string | null) into the decoder
    // and return every line it completes; partial data stays buffered.
    decode(chunk) {
        if (chunk == null) {
            return [];
        }
        // Normalize the chunk to bytes so buffering works uniformly.
        const binaryChunk = chunk instanceof ArrayBuffer ? new Uint8Array(chunk)
            : typeof chunk === 'string' ? encodeUTF8(chunk)
                : chunk;
        __classPrivateFieldSet(this, _LineDecoder_buffer, concatBytes([__classPrivateFieldGet(this, _LineDecoder_buffer, "f"), binaryChunk]), "f");
        const lines = [];
        let patternIndex;
        // NOTE: #carriageReturnIndex, when non-null, holds the index ONE PAST a
        // `\r` whose line ending is still ambiguous (it may pair with a `\n`).
        while ((patternIndex = findNewlineIndex(__classPrivateFieldGet(this, _LineDecoder_buffer, "f"), __classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f"))) != null) {
            if (patternIndex.carriage && __classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f") == null) {
                // skip until we either get a corresponding `\n`, a new `\r` or nothing
                __classPrivateFieldSet(this, _LineDecoder_carriageReturnIndex, patternIndex.index, "f");
                continue;
            }
            // we got double \r or \rtext\n
            if (__classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f") != null &&
                (patternIndex.index !== __classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f") + 1 || patternIndex.carriage)) {
                // The pending `\r` terminates a line by itself: emit everything
                // before it, drop it from the buffer, and reconsider the newly
                // found terminator on the next loop iteration.
                lines.push(decodeUTF8(__classPrivateFieldGet(this, _LineDecoder_buffer, "f").subarray(0, __classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f") - 1)));
                __classPrivateFieldSet(this, _LineDecoder_buffer, __classPrivateFieldGet(this, _LineDecoder_buffer, "f").subarray(__classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f")), "f");
                __classPrivateFieldSet(this, _LineDecoder_carriageReturnIndex, null, "f");
                continue;
            }
            // Plain `\n`, or a `\r\n` pair (in which case exclude the `\r` too).
            const endIndex = __classPrivateFieldGet(this, _LineDecoder_carriageReturnIndex, "f") !== null ? patternIndex.preceding - 1 : patternIndex.preceding;
            const line = decodeUTF8(__classPrivateFieldGet(this, _LineDecoder_buffer, "f").subarray(0, endIndex));
            lines.push(line);
            __classPrivateFieldSet(this, _LineDecoder_buffer, __classPrivateFieldGet(this, _LineDecoder_buffer, "f").subarray(patternIndex.index), "f");
            __classPrivateFieldSet(this, _LineDecoder_carriageReturnIndex, null, "f");
        }
        return lines;
    }
    // Emit any trailing data that was not newline-terminated.
    flush() {
        if (!__classPrivateFieldGet(this, _LineDecoder_buffer, "f").length) {
            return [];
        }
        // Appending a synthetic `\n` forces decode() to flush the remainder.
        return this.decode('\n');
    }
}
|
||||
// Wire up the WeakMaps backing LineDecoder's emulated private fields.
_LineDecoder_buffer = new WeakMap(), _LineDecoder_carriageReturnIndex = new WeakMap();
// NOTE(review): these statics are not referenced anywhere in this file;
// presumably part of the public surface — confirm before removing.
// prettier-ignore
LineDecoder.NEWLINE_CHARS = new Set(['\n', '\r']);
LineDecoder.NEWLINE_REGEXP = /\r\n|[\n\r]/g;
|
||||
/**
 * Searches `buffer` (a Uint8Array) for the next line terminator byte
 * (`\r` or `\n`), starting at `startIndex` (0 when null/undefined).
 *
 * Returns `null` when no terminator is found, otherwise:
 *   - `preceding`: the index of the terminator byte itself, i.e. the line's
 *     exclusive end (`buffer.subarray(0, preceding)` is the line content),
 *   - `index`: the index just after the terminator,
 *   - `carriage`: true when the terminator was `\r`, false for `\n`.
 *
 * (The previous doc example claimed `{ preceding: 2, index: 3 }` for
 * `'abc\ndef'`; the implementation actually yields `{ preceding: 3, index: 4 }`
 * and operates on bytes, not strings — the example below matches the code.)
 *
 * ```ts
 * findNewlineIndex(encodeUTF8('abc\ndef'), null) // -> { preceding: 3, index: 4, carriage: false }
 * ```
 */
function findNewlineIndex(buffer, startIndex) {
    const newline = 0x0a; // \n
    const carriage = 0x0d; // \r
    for (let i = startIndex ?? 0; i < buffer.length; i++) {
        if (buffer[i] === newline) {
            return { preceding: i, index: i + 1, carriage: false };
        }
        if (buffer[i] === carriage) {
            return { preceding: i, index: i + 1, carriage: true };
        }
    }
    return null;
}
|
||||
/**
 * Scans `buffer` for the first double line terminator (`\n\n`, `\r\r`, or
 * `\r\n\r\n`) and returns the index just past it, or -1 when none is present.
 */
export function findDoubleNewlineIndex(buffer) {
    const NL = 0x0a; // \n
    const CR = 0x0d; // \r
    for (let i = 0; i + 1 < buffer.length; i++) {
        const first = buffer[i];
        const second = buffer[i + 1];
        if ((first === NL && second === NL) || (first === CR && second === CR)) {
            // \n\n or \r\r
            return i + 2;
        }
        const isDoubleCrlf = first === CR &&
            second === NL &&
            i + 3 < buffer.length &&
            buffer[i + 2] === CR &&
            buffer[i + 3] === NL;
        if (isDoubleCrlf) {
            // \r\n\r\n
            return i + 4;
        }
    }
    return -1;
}
|
||||
//# sourceMappingURL=line.mjs.map
|
||||
157
extracted-source/node_modules/@anthropic-ai/sdk/internal/detect-platform.mjs
generated
vendored
Normal file
157
extracted-source/node_modules/@anthropic-ai/sdk/internal/detect-platform.mjs
generated
vendored
Normal file
@@ -0,0 +1,157 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { VERSION } from "../version.mjs";
|
||||
/**
 * True when running in a browser-like environment, detected via the presence
 * of the `window`, `window.document`, and `navigator` globals.
 */
export const isRunningInBrowser = () => {
    // @ts-ignore
    const hasWindow = typeof window !== 'undefined';
    // @ts-ignore
    const hasDocument = hasWindow && typeof window.document !== 'undefined';
    // @ts-ignore
    const hasNavigator = typeof navigator !== 'undefined';
    return hasWindow && hasDocument && hasNavigator;
};
|
||||
/**
 * Note this does not detect 'browser'; for that, use getBrowserInfo().
 * Returns one of 'deno' | 'edge' | 'node' | 'unknown'.
 */
function getDetectedPlatform() {
    if (typeof Deno !== 'undefined' && Deno.build != null) {
        return 'deno';
    }
    if (typeof EdgeRuntime !== 'undefined') {
        return 'edge';
    }
    // Node is identified by its `process` object's internal toString tag.
    const maybeProcess = typeof globalThis.process !== 'undefined' ? globalThis.process : 0;
    if (Object.prototype.toString.call(maybeProcess) === '[object process]') {
        return 'node';
    }
    return 'unknown';
}
|
||||
/**
 * Builds the `X-Stainless-*` telemetry headers describing the current
 * runtime (deno / edge / node / browser / unknown), OS, and architecture.
 */
const getPlatformProperties = () => {
    const detectedPlatform = getDetectedPlatform();
    if (detectedPlatform === 'deno') {
        return {
            'X-Stainless-Lang': 'js',
            'X-Stainless-Package-Version': VERSION,
            'X-Stainless-OS': normalizePlatform(Deno.build.os),
            'X-Stainless-Arch': normalizeArch(Deno.build.arch),
            'X-Stainless-Runtime': 'deno',
            // Deno.version may be a plain string or an object with a `deno` field.
            'X-Stainless-Runtime-Version': typeof Deno.version === 'string' ? Deno.version : Deno.version?.deno ?? 'unknown',
        };
    }
    if (typeof EdgeRuntime !== 'undefined') {
        return {
            'X-Stainless-Lang': 'js',
            'X-Stainless-Package-Version': VERSION,
            'X-Stainless-OS': 'Unknown',
            'X-Stainless-Arch': `other:${EdgeRuntime}`,
            'X-Stainless-Runtime': 'edge',
            'X-Stainless-Runtime-Version': globalThis.process.version,
        };
    }
    // Check if Node.js
    if (detectedPlatform === 'node') {
        return {
            'X-Stainless-Lang': 'js',
            'X-Stainless-Package-Version': VERSION,
            'X-Stainless-OS': normalizePlatform(globalThis.process.platform ?? 'unknown'),
            'X-Stainless-Arch': normalizeArch(globalThis.process.arch ?? 'unknown'),
            'X-Stainless-Runtime': 'node',
            'X-Stainless-Runtime-Version': globalThis.process.version ?? 'unknown',
        };
    }
    const browserInfo = getBrowserInfo();
    if (browserInfo) {
        return {
            'X-Stainless-Lang': 'js',
            'X-Stainless-Package-Version': VERSION,
            'X-Stainless-OS': 'Unknown',
            'X-Stainless-Arch': 'unknown',
            'X-Stainless-Runtime': `browser:${browserInfo.browser}`,
            'X-Stainless-Runtime-Version': browserInfo.version,
        };
    }
    // TODO add support for Cloudflare workers, etc.
    return {
        'X-Stainless-Lang': 'js',
        'X-Stainless-Package-Version': VERSION,
        'X-Stainless-OS': 'Unknown',
        'X-Stainless-Arch': 'unknown',
        'X-Stainless-Runtime': 'unknown',
        'X-Stainless-Runtime-Version': 'unknown',
    };
};
|
||||
// Note: modified from https://github.com/JS-DevTools/host-environment/blob/b1ab79ecde37db5d6e163c050e54fe7d287d7c92/src/isomorphic.browser.ts
/**
 * Identifies the current browser (and its version) from `navigator.userAgent`;
 * returns `null` when there is no navigator or no pattern matches.
 */
function getBrowserInfo() {
    if (typeof navigator === 'undefined' || !navigator) {
        return null;
    }
    // NOTE: The order matters here!
    const browserPatterns = [
        { key: 'edge', pattern: /Edge(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ },
        { key: 'ie', pattern: /MSIE(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ },
        { key: 'ie', pattern: /Trident(?:.*rv\:(\d+)\.(\d+)(?:\.(\d+))?)?/ },
        { key: 'chrome', pattern: /Chrome(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ },
        { key: 'firefox', pattern: /Firefox(?:\W+(\d+)\.(\d+)(?:\.(\d+))?)?/ },
        { key: 'safari', pattern: /(?:Version\W+(\d+)\.(\d+)(?:\.(\d+))?)?(?:\W+Mobile\S*)?\W+Safari/ },
    ];
    // Find the FIRST matching browser
    for (const { key, pattern } of browserPatterns) {
        const match = pattern.exec(navigator.userAgent);
        if (!match) {
            continue;
        }
        // Missing capture groups default to 0 so the version is always x.y.z.
        const [, major = 0, minor = 0, patch = 0] = match;
        return { browser: key, version: `${major}.${minor}.${patch}` };
    }
    return null;
}
|
||||
/**
 * Maps a Node/Deno architecture string to the normalized value reported in
 * the `X-Stainless-Arch` header.
 * Node docs: https://nodejs.org/api/process.html#processarch
 * Deno docs: https://doc.deno.land/deno/stable/~/Deno.build
 */
const normalizeArch = (arch) => {
    switch (arch) {
        case 'x32':
            return 'x32';
        case 'x86_64':
        case 'x64':
            return 'x64';
        case 'arm':
            return 'arm';
        case 'aarch64':
        case 'arm64':
            return 'arm64';
        default:
            // Unrecognized non-empty values pass through; empty means unknown.
            return arch ? `other:${arch}` : 'unknown';
    }
};
|
||||
/**
 * Maps a Node/Deno platform string to the normalized value reported in the
 * `X-Stainless-OS` header.
 * Node platforms: https://nodejs.org/api/process.html#processplatform
 * Deno platforms: https://doc.deno.land/deno/stable/~/Deno.build
 *                 https://github.com/denoland/deno/issues/14799
 */
const normalizePlatform = (platform) => {
    platform = platform.toLowerCase();
    // NOTE: this iOS check is untested and may not work
    // Node does not work natively on IOS, there is a fork at
    // https://github.com/nodejs-mobile/nodejs-mobile
    // however it is unknown at the time of writing how to detect if it is running
    if (platform.includes('ios'))
        return 'iOS';
    const names = new Map([
        ['android', 'Android'],
        ['darwin', 'MacOS'],
        ['win32', 'Windows'],
        ['freebsd', 'FreeBSD'],
        ['openbsd', 'OpenBSD'],
        ['linux', 'Linux'],
    ]);
    const normalized = names.get(platform);
    if (normalized)
        return normalized;
    // Unrecognized non-empty values pass through; empty means unknown.
    return platform ? `Other:${platform}` : 'Unknown';
};
|
||||
// Memoized platform header set; computed lazily on first use.
let _platformHeaders;
/** Returns the cached `X-Stainless-*` headers, computing them at most once. */
export const getPlatformHeaders = () => {
    if (_platformHeaders == null) {
        _platformHeaders = getPlatformProperties();
    }
    return _platformHeaders;
};
|
||||
//# sourceMappingURL=detect-platform.mjs.map
|
||||
36
extracted-source/node_modules/@anthropic-ai/sdk/internal/errors.mjs
generated
vendored
Normal file
36
extracted-source/node_modules/@anthropic-ai/sdk/internal/errors.mjs
generated
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
/**
 * Whether `err` represents a fetch-request cancellation, covering both
 * spec-compliant fetch implementations (`name === 'AbortError'`) and Expo's
 * fetch (message mentioning `FetchRequestCanceledException`).
 */
export function isAbortError(err) {
    if (typeof err !== 'object' || err === null) {
        return false;
    }
    // Spec-compliant fetch implementations
    if ('name' in err && err.name === 'AbortError') {
        return true;
    }
    // Expo fetch
    return 'message' in err && String(err.message).includes('FetchRequestCanceledException');
}
|
||||
/**
 * Best-effort conversion of an arbitrary thrown value into a real `Error`:
 * - Errors are returned as-is.
 * - Error-shaped objects (e.g. from another realm) are copied into a new
 *   Error, preserving message, stack, cause, and name.
 * - Other objects become an Error whose message is their JSON serialization.
 * - Anything else is stringified by the Error constructor.
 */
export const castToError = (err) => {
    if (err instanceof Error)
        return err;
    if (typeof err === 'object' && err !== null) {
        try {
            const tag = Object.prototype.toString.call(err);
            if (tag === '[object Error]') {
                // @ts-ignore - not all envs have native support for cause yet
                const copied = new Error(err.message, err.cause ? { cause: err.cause } : {});
                if (err.stack)
                    copied.stack = err.stack;
                // @ts-ignore - not all envs have native support for cause yet
                if (err.cause && !copied.cause)
                    copied.cause = err.cause;
                if (err.name)
                    copied.name = err.name;
                return copied;
            }
        }
        catch { }
        try {
            // May throw (e.g. circular structures); fall through when it does.
            return new Error(JSON.stringify(err));
        }
        catch { }
    }
    return new Error(err);
};
|
||||
//# sourceMappingURL=errors.mjs.map
|
||||
74
extracted-source/node_modules/@anthropic-ai/sdk/internal/headers.mjs
generated
vendored
Normal file
74
extracted-source/node_modules/@anthropic-ai/sdk/internal/headers.mjs
generated
vendored
Normal file
@@ -0,0 +1,74 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { isReadonlyArray } from "./utils/values.mjs";
|
||||
// Brand marking header containers produced by buildHeaders(), which carry
// explicitly-nulled header names alongside concrete values.
const brand_privateNullableHeaders = Symbol.for('brand.privateNullableHeaders');
/**
 * Yields `[name, value]` pairs from any supported headers representation:
 * a branded nullable-headers object, a `Headers` instance, an array of
 * entries, or a plain object. A yielded `null` value means "clear this
 * header" to downstream consumers (see buildHeaders).
 */
function* iterateHeaders(headers) {
    if (!headers)
        return;
    if (brand_privateNullableHeaders in headers) {
        // Already-normalized form: replay stored values, then the null markers.
        const { values, nulls } = headers;
        yield* values.entries();
        for (const name of nulls) {
            yield [name, null];
        }
        return;
    }
    let shouldClear = false;
    let iter;
    if (headers instanceof Headers) {
        iter = headers.entries();
    }
    else if (isReadonlyArray(headers)) {
        iter = headers;
    }
    else {
        // Plain object form: its keys replace prior values rather than append.
        shouldClear = true;
        iter = Object.entries(headers ?? {});
    }
    for (let row of iter) {
        const name = row[0];
        if (typeof name !== 'string')
            throw new TypeError('expected header name to be a string');
        // A value may itself be an array of values for the same header name.
        const values = isReadonlyArray(row[1]) ? row[1] : [row[1]];
        let didClear = false;
        for (const value of values) {
            if (value === undefined)
                continue;
            // Objects keys always overwrite older headers, they never append.
            // Yield a null to clear the header before adding the new values.
            if (shouldClear && !didClear) {
                didClear = true;
                yield [name, null];
            }
            yield [name, value];
        }
    }
}
|
||||
/**
 * Merges several headers representations (later entries win) into a branded
 * object holding a `Headers` of concrete values plus the set of lowercased
 * names that were explicitly nulled (i.e. must be removed from defaults).
 */
export const buildHeaders = (newHeaders) => {
    const targetHeaders = new Headers();
    const nullHeaders = new Set();
    for (const headers of newHeaders) {
        const seenHeaders = new Set();
        for (const [name, value] of iterateHeaders(headers)) {
            const lowerName = name.toLowerCase();
            // The first time a name appears within one source, drop whatever an
            // earlier source set: sources overwrite each other rather than append.
            if (!seenHeaders.has(lowerName)) {
                targetHeaders.delete(name);
                seenHeaders.add(lowerName);
            }
            if (value === null) {
                // Explicit null: remove the header and remember the removal.
                targetHeaders.delete(name);
                nullHeaders.add(lowerName);
            }
            else {
                targetHeaders.append(name, value);
                nullHeaders.delete(lowerName);
            }
        }
    }
    return { [brand_privateNullableHeaders]: true, values: targetHeaders, nulls: nullHeaders };
};
|
||||
/** True when `headers` would yield no entries at all. */
export const isEmptyHeaders = (headers) => {
    const first = iterateHeaders(headers).next();
    return first.done === true;
};
|
||||
//# sourceMappingURL=headers.mjs.map
|
||||
56
extracted-source/node_modules/@anthropic-ai/sdk/internal/parse.mjs
generated
vendored
Normal file
56
extracted-source/node_modules/@anthropic-ai/sdk/internal/parse.mjs
generated
vendored
Normal file
@@ -0,0 +1,56 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { Stream } from "../core/streaming.mjs";
|
||||
import { formatRequestDetails, loggerFor } from "./utils/log.mjs";
|
||||
/**
 * Parses a fetch `Response` into the SDK-level result: a `Stream` for
 * streaming requests, `null` for 204s, `undefined` for empty JSON bodies,
 * parsed JSON tagged with `_request_id`, the raw response in binary mode,
 * or plain text otherwise. Also emits debug logs for the parsed response.
 */
export async function defaultParseResponse(client, props) {
    const { response, requestLogID, retryOfRequestLogID, startTime } = props;
    const body = await (async () => {
        if (props.options.stream) {
            loggerFor(client).debug('response', response.status, response.url, response.headers, response.body);
            // Note: there is an invariant here that isn't represented in the type system
            // that if you set `stream: true` the response type must also be `Stream<T>`
            if (props.options.__streamClass) {
                return props.options.__streamClass.fromSSEResponse(response, props.controller);
            }
            return Stream.fromSSEResponse(response, props.controller);
        }
        // fetch refuses to read the body when the status code is 204.
        if (response.status === 204) {
            return null;
        }
        if (props.options.__binaryResponse) {
            return response;
        }
        // Strip any parameters (e.g. "; charset=utf-8") off the content type.
        const contentType = response.headers.get('content-type');
        const mediaType = contentType?.split(';')[0]?.trim();
        const isJSON = mediaType?.includes('application/json') || mediaType?.endsWith('+json');
        if (isJSON) {
            const contentLength = response.headers.get('content-length');
            if (contentLength === '0') {
                // if there is no content we can't do anything
                return undefined;
            }
            const json = await response.json();
            return addRequestID(json, response);
        }
        const text = await response.text();
        return text;
    })();
    loggerFor(client).debug(`[${requestLogID}] response parsed`, formatRequestDetails({
        retryOfRequestLogID,
        url: response.url,
        status: response.status,
        body,
        durationMs: Date.now() - startTime,
    }));
    return body;
}
|
||||
/**
 * Attaches the response's `request-id` header to a parsed JSON object as a
 * non-enumerable `_request_id` property; non-objects and arrays are returned
 * untouched.
 */
export function addRequestID(value, response) {
    const isTaggableObject = Boolean(value) && typeof value === 'object' && !Array.isArray(value);
    if (!isTaggableObject) {
        return value;
    }
    // Non-enumerable so the ID never shows up in serialization or key listings.
    return Object.defineProperty(value, '_request_id', {
        value: response.headers.get('request-id'),
        enumerable: false,
    });
}
|
||||
//# sourceMappingURL=parse.mjs.map
|
||||
10
extracted-source/node_modules/@anthropic-ai/sdk/internal/request-options.mjs
generated
vendored
Normal file
10
extracted-source/node_modules/@anthropic-ai/sdk/internal/request-options.mjs
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
/**
 * Default request-body encoder: serializes `body` as JSON with a matching
 * content-type header. (`headers` is accepted for interface parity but unused.)
 */
export const FallbackEncoder = ({ headers, body }) => {
    const bodyHeaders = { 'content-type': 'application/json' };
    return { bodyHeaders, body: JSON.stringify(body) };
};
|
||||
//# sourceMappingURL=request-options.mjs.map
|
||||
85
extracted-source/node_modules/@anthropic-ai/sdk/internal/shims.mjs
generated
vendored
Normal file
85
extracted-source/node_modules/@anthropic-ai/sdk/internal/shims.mjs
generated
vendored
Normal file
@@ -0,0 +1,85 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
/**
 * Returns the global `fetch`, throwing a descriptive error on runtimes that
 * do not provide one.
 */
export function getDefaultFetch() {
    if (typeof fetch === 'undefined') {
        throw new Error('`fetch` is not defined as a global; Either pass `fetch` to the client, `new Anthropic({ fetch })` or polyfill the global, `globalThis.fetch = fetch`');
    }
    return fetch;
}
|
||||
/**
 * Constructs a `ReadableStream` via the global constructor, with a helpful
 * error on runtimes that lack it.
 */
export function makeReadableStream(...args) {
    const StreamCtor = globalThis.ReadableStream;
    if (typeof StreamCtor === 'undefined') {
        // Note: All of the platforms / runtimes we officially support already define
        // `ReadableStream` as a global, so this should only ever be hit on unsupported runtimes.
        throw new Error('`ReadableStream` is not defined as a global; You will need to polyfill it, `globalThis.ReadableStream = ReadableStream`');
    }
    return new StreamCtor(...args);
}
|
||||
/** Wraps a sync or async iterable as a `ReadableStream` of its values. */
export function ReadableStreamFrom(iterable) {
    const iter = Symbol.asyncIterator in iterable ?
        iterable[Symbol.asyncIterator]()
        : iterable[Symbol.iterator]();
    return makeReadableStream({
        start() { },
        async pull(controller) {
            // Pull one value per request; close once the iterator is exhausted.
            const { done, value } = await iter.next();
            if (done) {
                controller.close();
                return;
            }
            controller.enqueue(value);
        },
        async cancel() {
            // Give the source a chance to clean up (e.g. generator finally blocks).
            await iter.return?.();
        },
    });
}
|
||||
/**
 * Most browsers don't yet have async iterable support for ReadableStream,
 * and Node has a very different way of reading bytes from its "ReadableStream".
 *
 * This polyfill was pulled from https://github.com/MattiasBuelens/web-streams-polyfill/pull/122#issuecomment-1627354490
 */
export function ReadableStreamToAsyncIterable(stream) {
    // Prefer native support when the stream already implements the protocol.
    if (stream[Symbol.asyncIterator])
        return stream;
    const reader = stream.getReader();
    const next = async () => {
        try {
            const result = await reader.read();
            if (result?.done)
                reader.releaseLock(); // release lock when stream becomes closed
            return result;
        }
        catch (e) {
            reader.releaseLock(); // release lock when stream becomes errored
            throw e;
        }
    };
    const finish = async () => {
        const cancelPromise = reader.cancel();
        reader.releaseLock();
        await cancelPromise;
        return { done: true, value: undefined };
    };
    return {
        next,
        return: finish,
        [Symbol.asyncIterator]() {
            return this;
        },
    };
}
|
||||
/**
 * Cancels a ReadableStream we don't need to consume.
 * See https://undici.nodejs.org/#/?id=garbage-collection
 */
export async function CancelReadableStream(stream) {
    if (stream === null || typeof stream !== 'object')
        return;
    // Prefer the async-iterator protocol when the stream implements it.
    const makeIterator = stream[Symbol.asyncIterator];
    if (makeIterator) {
        await makeIterator.call(stream).return?.();
        return;
    }
    // Fall back to the reader API: cancel, then release our lock.
    const reader = stream.getReader();
    const cancelPromise = reader.cancel();
    reader.releaseLock();
    await cancelPromise;
}
|
||||
//# sourceMappingURL=shims.mjs.map
|
||||
93
extracted-source/node_modules/@anthropic-ai/sdk/internal/to-file.mjs
generated
vendored
Normal file
93
extracted-source/node_modules/@anthropic-ai/sdk/internal/to-file.mjs
generated
vendored
Normal file
@@ -0,0 +1,93 @@
|
||||
import { getName, makeFile, isAsyncIterable } from "./uploads.mjs";
|
||||
import { checkFileSupport } from "./uploads.mjs";
|
||||
/**
 * This check adds the arrayBuffer() method type because it is available and used at runtime
 */
const isBlobLike = (value) => {
    if (value == null || typeof value !== 'object')
        return false;
    return (typeof value.size === 'number' &&
        typeof value.type === 'string' &&
        typeof value.text === 'function' &&
        typeof value.slice === 'function' &&
        typeof value.arrayBuffer === 'function');
};
/**
 * This check adds the arrayBuffer() method type because it is available and used at runtime
 */
const isFileLike = (value) => {
    if (value == null || typeof value !== 'object')
        return false;
    // A File is a Blob that additionally carries a name and lastModified.
    return (typeof value.name === 'string' &&
        typeof value.lastModified === 'number' &&
        isBlobLike(value));
};
// Duck-type check for fetch `Response`-like values.
const isResponseLike = (value) => {
    if (value == null || typeof value !== 'object')
        return false;
    return typeof value.url === 'string' && typeof value.blob === 'function';
};
|
||||
/**
 * Helper for creating a {@link File} to pass to an SDK upload method from a variety of different data formats
 * @param value the raw content of the file. Can be an {@link Uploadable}, BlobLikePart, or AsyncIterable of BlobLikeParts
 * @param {string=} name the name of the file. If omitted, toFile will try to determine a file name from bits if possible
 * @param {Object=} options additional properties
 * @param {string=} options.type the MIME type of the content
 * @param {number=} options.lastModified the last modified timestamp
 * @returns a {@link File} with the given properties
 */
export async function toFile(value, name, options) {
    checkFileSupport();
    // If it's a promise, resolve it.
    value = await value;
    name || (name = getName(value, true));
    // If we've been given a `File` we don't need to do anything if the name / options
    // have not been customised.
    if (isFileLike(value)) {
        if (value instanceof File && name == null && options == null) {
            return value;
        }
        return makeFile([await value.arrayBuffer()], name ?? value.name, {
            type: value.type,
            lastModified: value.lastModified,
            ...options,
        });
    }
    if (isResponseLike(value)) {
        const blob = await value.blob();
        name || (name = new URL(value.url).pathname.split(/[\\/]/).pop());
        return makeFile(await getBytes(blob), name, options);
    }
    const parts = await getBytes(value);
    if (!options?.type) {
        // Infer the MIME type from the first Blob-like part that declares one.
        // FIX: the previous code compared the found *part object* against
        // 'string' (`typeof type === 'string'` on the part itself), a condition
        // that could never hold, so the inferred type was always dropped.
        const typedPart = parts.find((part) => typeof part === 'object' && part !== null && 'type' in part && part.type);
        const type = typedPart?.type;
        if (typeof type === 'string') {
            options = { ...options, type };
        }
    }
    return makeFile(parts, name, options);
}
|
||||
/**
 * Flattens `value` into an array of BlobPart-compatible pieces: strings,
 * ArrayBuffer(-view)s, and Blobs. Async iterables are consumed recursively.
 * Throws a descriptive error for anything else.
 */
async function getBytes(value) {
    const parts = [];
    if (typeof value === 'string' ||
        ArrayBuffer.isView(value) || // includes Uint8Array, Buffer, etc.
        value instanceof ArrayBuffer) {
        parts.push(value);
        return parts;
    }
    if (isBlobLike(value)) {
        parts.push(value instanceof Blob ? value : await value.arrayBuffer());
        return parts;
    }
    if (isAsyncIterable(value) // includes Readable, ReadableStream, etc.
    ) {
        for await (const chunk of value) {
            parts.push(...(await getBytes(chunk))); // TODO, consider validating?
        }
        return parts;
    }
    const constructor = value?.constructor?.name;
    throw new Error(`Unexpected data type: ${typeof value}${constructor ? `; constructor: ${constructor}` : ''}${propsForError(value)}`);
}
|
||||
// Renders an own-property listing for error messages about unexpected values.
function propsForError(value) {
    if (typeof value !== 'object' || value === null)
        return '';
    const quoted = Object.getOwnPropertyNames(value).map((prop) => `"${prop}"`);
    return `; props: [${quoted.join(', ')}]`;
}
|
||||
//# sourceMappingURL=to-file.mjs.map
|
||||
17
extracted-source/node_modules/@anthropic-ai/sdk/internal/tslib.mjs
generated
vendored
Normal file
17
extracted-source/node_modules/@anthropic-ai/sdk/internal/tslib.mjs
generated
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
/**
 * TypeScript downlevel helper: writes `value` into the private member of
 * `receiver` described by `state`/`kind`/`f`, returning `value`.
 * kind "m" = method (never writable), "a" = accessor (via setter `f`),
 * otherwise a field backed by descriptor `f` or the WeakMap `state`.
 */
function __classPrivateFieldSet(receiver, state, value, kind, f) {
    if (kind === "m")
        throw new TypeError("Private method is not writable");
    if (kind === "a" && !f)
        throw new TypeError("Private accessor was defined without a setter");
    // Membership check: static members compare against the constructor itself;
    // instance members must be registered in the backing WeakMap.
    const declared = typeof state === "function" ? receiver === state && Boolean(f) : state.has(receiver);
    if (!declared)
        throw new TypeError("Cannot write private member to an object whose class did not declare it");
    if (kind === "a") {
        f.call(receiver, value);
    }
    else if (f) {
        f.value = value;
    }
    else {
        state.set(receiver, value);
    }
    return value;
}
|
||||
/**
 * TypeScript downlevel helper: reads the private member of `receiver`
 * described by `state`/`kind`/`f`.
 * kind "m" = method (returns `f`), "a" = accessor (via getter `f`),
 * otherwise a field read from descriptor `f` or the WeakMap `state`.
 */
function __classPrivateFieldGet(receiver, state, kind, f) {
    if (kind === "a" && !f)
        throw new TypeError("Private accessor was defined without a getter");
    // Membership check mirrors __classPrivateFieldSet.
    const declared = typeof state === "function" ? receiver === state && Boolean(f) : state.has(receiver);
    if (!declared)
        throw new TypeError("Cannot read private member from an object whose class did not declare it");
    if (kind === "m")
        return f;
    if (kind === "a")
        return f.call(receiver);
    return f ? f.value : state.get(receiver);
}
|
||||
export { __classPrivateFieldSet, __classPrivateFieldGet };
|
||||
135
extracted-source/node_modules/@anthropic-ai/sdk/internal/uploads.mjs
generated
vendored
Normal file
135
extracted-source/node_modules/@anthropic-ai/sdk/internal/uploads.mjs
generated
vendored
Normal file
@@ -0,0 +1,135 @@
|
||||
import { ReadableStreamFrom } from "./shims.mjs";
|
||||
/**
 * Asserts that the global `File` constructor exists (required for uploads),
 * throwing an actionable error when it does not — including upgrade guidance
 * when running on a Node version older than 20.
 * @throws {Error} when `File` is not defined as a global.
 */
export const checkFileSupport = () => {
    if (typeof File === 'undefined') {
        const { process } = globalThis;
        const nodeVersion = process?.versions?.node;
        // FIX: previously `parseInt(version.split('.'))` passed an ARRAY to
        // parseInt (relying on array→string coercion) and omitted the radix;
        // parse the major version component explicitly instead.
        const isOldNode = typeof nodeVersion === 'string' && Number.parseInt(nodeVersion.split('.')[0], 10) < 20;
        throw new Error('`File` is not defined as a global, which is required for file uploads.' +
            (isOldNode ?
                " Update to Node 20 LTS or newer, or set `globalThis.File` to `import('node:buffer').File`."
                : ''));
    }
};
|
||||
/**
 * Construct a `File` instance. This is used to ensure a helpful error is thrown
 * for environments that don't define a global `File` yet.
 */
export function makeFile(fileBits, fileName, options) {
    checkFileSupport();
    const resolvedName = fileName ?? 'unknown_file';
    return new File(fileBits, resolvedName, options);
}
|
||||
export function getName(value, stripPath) {
|
||||
const val = (typeof value === 'object' &&
|
||||
value !== null &&
|
||||
(('name' in value && value.name && String(value.name)) ||
|
||||
('url' in value && value.url && String(value.url)) ||
|
||||
('filename' in value && value.filename && String(value.filename)) ||
|
||||
('path' in value && value.path && String(value.path)))) ||
|
||||
'';
|
||||
return stripPath ? val.split(/[\\/]/).pop() || undefined : val;
|
||||
}
|
||||
export const isAsyncIterable = (value) => value != null && typeof value === 'object' && typeof value[Symbol.asyncIterator] === 'function';
|
||||
/**
 * Returns a multipart/form-data request if any part of the given request body contains a File / Blob value.
 * Otherwise returns the request as is.
 */
export const maybeMultipartFormRequestOptions = async (opts, fetch) => {
    if (hasUploadableValue(opts.body)) {
        const body = await createForm(opts.body, fetch);
        return { ...opts, body };
    }
    return opts;
};
/** Unconditionally encode `opts.body` as multipart/form-data. */
export const multipartFormRequestOptions = async (opts, fetch, stripFilenames = true) => {
    const body = await createForm(opts.body, fetch, stripFilenames);
    return { ...opts, body };
};
|
||||
// Memoizes the FormData-support probe, keyed by fetch implementation.
const supportsFormDataMap = /* @__PURE__ */ new WeakMap();
/**
 * node-fetch doesn't support the global FormData object in recent node versions. Instead of sending
 * properly-encoded form data, it just stringifies the object, resulting in a request body of "[object FormData]".
 * This function detects if the fetch function provided supports the global FormData object to avoid
 * confusing error messages later on.
 */
function supportsFormData(fetchObject) {
    const fetchFn = typeof fetchObject === 'function' ? fetchObject : fetchObject.fetch;
    const memoized = supportsFormDataMap.get(fetchFn);
    if (memoized) {
        return memoized;
    }
    const probe = (async () => {
        try {
            let FetchResponse;
            if ('Response' in fetchFn) {
                FetchResponse = fetchFn.Response;
            }
            else {
                // Fall back to sniffing the Response class off a trivial data: URL fetch.
                FetchResponse = (await fetchFn('data:,')).constructor;
            }
            const sample = new FormData();
            const serialized = await new FetchResponse(sample).text();
            // A broken implementation stringifies FormData to "[object FormData]".
            return serialized !== sample.toString();
        }
        catch {
            // avoid false negatives
            return true;
        }
    })();
    supportsFormDataMap.set(fetchFn, probe);
    return probe;
}
|
||||
/**
 * Encode `body` (a plain object) into a FormData instance, after verifying the
 * supplied fetch implementation can actually transmit FormData bodies.
 *
 * @throws {TypeError} When the fetch implementation would stringify FormData.
 */
export const createForm = async (body, fetch, stripFilenames = true) => {
    const usable = await supportsFormData(fetch);
    if (!usable) {
        throw new TypeError('The provided fetch function does not support file uploads with the current global FormData class.');
    }
    const form = new FormData();
    const entries = Object.entries(body || {});
    await Promise.all(entries.map(([key, value]) => addFormValue(form, key, value, stripFilenames)));
    return form;
};
|
||||
// We check for Blob not File because Bun.File doesn't inherit from File,
|
||||
// but they both inherit from Blob and have a `name` property at runtime.
|
||||
const isNamedBlob = (value) => value instanceof Blob && 'name' in value;
|
||||
const isUploadable = (value) => typeof value === 'object' &&
|
||||
value !== null &&
|
||||
(value instanceof Response || isAsyncIterable(value) || isNamedBlob(value));
|
||||
const hasUploadableValue = (value) => {
|
||||
if (isUploadable(value))
|
||||
return true;
|
||||
if (Array.isArray(value))
|
||||
return value.some(hasUploadableValue);
|
||||
if (value && typeof value === 'object') {
|
||||
for (const k in value) {
|
||||
if (hasUploadableValue(value[k]))
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
const addFormValue = async (form, key, value, stripFilenames) => {
|
||||
if (value === undefined)
|
||||
return;
|
||||
if (value == null) {
|
||||
throw new TypeError(`Received null for "${key}"; to pass null in FormData, you must use the string 'null'`);
|
||||
}
|
||||
// TODO: make nested formats configurable
|
||||
if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') {
|
||||
form.append(key, String(value));
|
||||
}
|
||||
else if (value instanceof Response) {
|
||||
let options = {};
|
||||
const contentType = value.headers.get('Content-Type');
|
||||
if (contentType) {
|
||||
options = { type: contentType };
|
||||
}
|
||||
form.append(key, makeFile([await value.blob()], getName(value, stripFilenames), options));
|
||||
}
|
||||
else if (isAsyncIterable(value)) {
|
||||
form.append(key, makeFile([await new Response(ReadableStreamFrom(value)).blob()], getName(value, stripFilenames)));
|
||||
}
|
||||
else if (isNamedBlob(value)) {
|
||||
form.append(key, makeFile([value], getName(value, stripFilenames), { type: value.type }));
|
||||
}
|
||||
else if (Array.isArray(value)) {
|
||||
await Promise.all(value.map((entry) => addFormValue(form, key + '[]', entry, stripFilenames)));
|
||||
}
|
||||
else if (typeof value === 'object') {
|
||||
await Promise.all(Object.entries(value).map(([name, prop]) => addFormValue(form, `${key}[${name}]`, prop, stripFilenames)));
|
||||
}
|
||||
else {
|
||||
throw new TypeError(`Invalid value given to form, expected a string, number, boolean, object, Array, File or Blob but got ${value} instead`);
|
||||
}
|
||||
};
|
||||
//# sourceMappingURL=uploads.mjs.map
|
||||
26
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/bytes.mjs
generated
vendored
Normal file
26
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/bytes.mjs
generated
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
/** Concatenate a list of Uint8Arrays into a single newly-allocated buffer. */
export function concatBytes(buffers) {
    const total = buffers.reduce((sum, buf) => sum + buf.length, 0);
    const merged = new Uint8Array(total);
    let offset = 0;
    for (const buf of buffers) {
        merged.set(buf, offset);
        offset += buf.length;
    }
    return merged;
}
// Lazily-created, cached UTF-8 encode function.
let encodeUTF8_;
/** Encode a string to UTF-8 bytes using a shared TextEncoder. */
export function encodeUTF8(str) {
    if (!encodeUTF8_) {
        const encoder = new globalThis.TextEncoder();
        encodeUTF8_ = encoder.encode.bind(encoder);
    }
    return encodeUTF8_(str);
}
// Lazily-created, cached UTF-8 decode function.
let decodeUTF8_;
/** Decode UTF-8 bytes to a string using a shared TextDecoder. */
export function decodeUTF8(bytes) {
    if (!decodeUTF8_) {
        const decoder = new globalThis.TextDecoder();
        decodeUTF8_ = decoder.decode.bind(decoder);
    }
    return decodeUTF8_(bytes);
}
|
||||
//# sourceMappingURL=bytes.mjs.map
|
||||
18
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/env.mjs
generated
vendored
Normal file
18
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/env.mjs
generated
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
/**
 * Read an environment variable.
 *
 * Trims beginning and trailing whitespace.
 *
 * Will return undefined if the environment variable doesn't exist or cannot be accessed.
 */
export const readEnv = (env) => {
    const proc = globalThis.process;
    if (typeof proc !== 'undefined') {
        const raw = proc.env?.[env];
        return raw?.trim() ?? undefined;
    }
    const deno = globalThis.Deno;
    if (typeof deno !== 'undefined') {
        return deno.env?.get?.(env)?.trim();
    }
    return undefined;
};
|
||||
//# sourceMappingURL=env.mjs.map
|
||||
80
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/log.mjs
generated
vendored
Normal file
80
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/log.mjs
generated
vendored
Normal file
@@ -0,0 +1,80 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { hasOwn } from "./values.mjs";
|
||||
// Numeric severity per supported level; larger numbers are more verbose.
const levelNumbers = {
    off: 0,
    error: 200,
    warn: 300,
    info: 400,
    debug: 500,
};
/**
 * Validate a user-supplied log level. Returns it when recognized, otherwise
 * warns through the client's logger and returns undefined.
 */
export const parseLogLevel = (maybeLevel, sourceName, client) => {
    if (!maybeLevel) {
        return undefined;
    }
    if (hasOwn(levelNumbers, maybeLevel)) {
        return maybeLevel;
    }
    loggerFor(client).warn(`${sourceName} was set to ${JSON.stringify(maybeLevel)}, expected one of ${JSON.stringify(Object.keys(levelNumbers))}`);
    return undefined;
};
function noop() { }
/**
 * Return the logger's own method for `fnLevel` when enabled at `logLevel`,
 * otherwise a no-op.
 */
function makeLogFn(fnLevel, logger, logLevel) {
    const enabled = logger && levelNumbers[fnLevel] <= levelNumbers[logLevel];
    if (!enabled) {
        return noop;
    }
    // Don't wrap logger functions, we want the stacktrace intact!
    return logger[fnLevel].bind(logger);
}
// Shared logger returned when a client has no logger configured.
const noopLogger = {
    error: noop,
    warn: noop,
    info: noop,
    debug: noop,
};
// Cache of [logLevel, leveledLogger] keyed by the underlying logger object.
let cachedLoggers = /* @__PURE__ */ new WeakMap();
/** Build (and cache) a level-filtered logger for the given client. */
export function loggerFor(client) {
    const { logger } = client;
    const logLevel = client.logLevel ?? 'off';
    if (!logger) {
        return noopLogger;
    }
    const cached = cachedLoggers.get(logger);
    if (cached && cached[0] === logLevel) {
        return cached[1];
    }
    const leveled = {
        error: makeLogFn('error', logger, logLevel),
        warn: makeLogFn('warn', logger, logLevel),
        info: makeLogFn('info', logger, logLevel),
        debug: makeLogFn('debug', logger, logLevel),
    };
    cachedLoggers.set(logger, [logLevel, leveled]);
    return leveled;
}
|
||||
// Header names whose values must never appear in log output.
const SENSITIVE_HEADERS = ['x-api-key', 'authorization', 'cookie', 'set-cookie'];
/**
 * Normalize request details for logging: drop the redundant options.headers,
 * redact credential-bearing headers, and rename retryOfRequestLogID→retryOf.
 * Mutates and returns `details`.
 */
export const formatRequestDetails = (details) => {
    if (details.options) {
        details.options = { ...details.options };
        delete details.options['headers']; // redundant + leaks internals
    }
    if (details.headers) {
        const pairs = details.headers instanceof Headers ? [...details.headers] : Object.entries(details.headers);
        const redacted = pairs.map(([name, value]) => {
            const secret = SENSITIVE_HEADERS.includes(name.toLowerCase());
            return [name, secret ? '***' : value];
        });
        details.headers = Object.fromEntries(redacted);
    }
    if ('retryOfRequestLogID' in details) {
        if (details.retryOfRequestLogID) {
            details.retryOf = details.retryOfRequestLogID;
        }
        delete details.retryOfRequestLogID;
    }
    return details;
};
|
||||
//# sourceMappingURL=log.mjs.map
|
||||
74
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/path.mjs
generated
vendored
Normal file
74
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/path.mjs
generated
vendored
Normal file
@@ -0,0 +1,74 @@
|
||||
import { AnthropicError } from "../../core/error.mjs";
|
||||
/**
 * Percent-encode everything that isn't safe to have in a path without encoding safe chars.
 *
 * Taken from https://datatracker.ietf.org/doc/html/rfc3986#section-3.3:
 * > unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
 * > sub-delims = "!" / "$" / "&" / "'" / "(" / ")" / "*" / "+" / "," / ";" / "="
 * > pchar = unreserved / pct-encoded / sub-delims / ":" / "@"
 */
export function encodeURIPath(str) {
    // Runs of characters outside the pchar set are encoded as a unit.
    const unsafeRuns = /[^A-Za-z0-9\-._~!$&'()*+,;=:@]+/g;
    return str.replace(unsafeRuns, encodeURIComponent);
}
|
||||
// Sentinel with no prototype, used as a safe fallback in the cross-realm
// toString comparison below.
const EMPTY = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.create(null));
/**
 * Build a template-tag function that URI-encodes interpolated path params and
 * rejects values that would produce unsafe path segments.
 *
 * @param pathEncoder - Encoder applied to params before the first `?`/`#`;
 *   after that point params are encoded with encodeURIComponent instead.
 * @returns A tag function usable as path`/v1/items/${id}`.
 * @throws {AnthropicError} When a param is null/undefined, has only the
 *   default Object toString, or produces a "." / ".." path segment.
 */
export const createPathTagFunction = (pathEncoder = encodeURIPath) => function path(statics, ...params) {
    // If there are no params, no processing is needed.
    if (statics.length === 1)
        return statics[0];
    // Becomes true once a '?' or '#' is seen; query/fragment params get
    // encodeURIComponent instead of the (laxer) path encoder.
    let postPath = false;
    // Collected {start, length, error} records for the underline diagnostics.
    const invalidSegments = [];
    const path = statics.reduce((previousValue, currentValue, index) => {
        if (/[?#]/.test(currentValue)) {
            postPath = true;
        }
        const value = params[index];
        let encoded = (postPath ? encodeURIComponent : pathEncoder)('' + value);
        // Reject nullish params and plain objects whose toString is the
        // default Object.prototype.toString (which would render "[object Object]").
        if (index !== params.length &&
            (value == null ||
                (typeof value === 'object' &&
                    // handle values from other realms
                    value.toString ===
                        Object.getPrototypeOf(Object.getPrototypeOf(value.hasOwnProperty ?? EMPTY) ?? EMPTY)
                            ?.toString))) {
            encoded = value + '';
            invalidSegments.push({
                start: previousValue.length + currentValue.length,
                length: encoded.length,
                error: `Value of type ${Object.prototype.toString
                    .call(value)
                    .slice(8, -1)} is not a valid path parameter`,
            });
        }
        return previousValue + currentValue + (index === params.length ? '' : encoded);
    }, '');
    // Only the portion before any '?' or '#' is scanned for unsafe segments.
    const pathOnly = path.split(/[?#]/, 1)[0];
    // Matches "." / ".." segments (including %2e-encoded dots) between slashes.
    const invalidSegmentPattern = /(?<=^|\/)(?:\.|%2e){1,2}(?=\/|$)/gi;
    let match;
    // Find all invalid segments
    while ((match = invalidSegmentPattern.exec(pathOnly)) !== null) {
        invalidSegments.push({
            start: match.index,
            length: match[0].length,
            error: `Value "${match[0]}" can\'t be safely passed as a path parameter`,
        });
    }
    invalidSegments.sort((a, b) => a.start - b.start);
    if (invalidSegments.length > 0) {
        // Build a caret underline aligned beneath each offending span.
        let lastEnd = 0;
        const underline = invalidSegments.reduce((acc, segment) => {
            const spaces = ' '.repeat(segment.start - lastEnd);
            const arrows = '^'.repeat(segment.length);
            lastEnd = segment.start + segment.length;
            return acc + spaces + arrows;
        }, '');
        throw new AnthropicError(`Path parameters result in path with invalid segments:\n${invalidSegments
            .map((e) => e.error)
            .join('\n')}\n${path}\n${underline}`);
    }
    return path;
};
/**
 * URI-encodes path params and ensures no unsafe /./ or /../ path segments are introduced.
 */
export const path = /* @__PURE__ */ createPathTagFunction(encodeURIPath);
|
||||
//# sourceMappingURL=path.mjs.map
|
||||
3
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/sleep.mjs
generated
vendored
Normal file
3
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/sleep.mjs
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
/** Resolve after approximately `ms` milliseconds. */
export const sleep = (ms) => {
    return new Promise((resolve) => {
        setTimeout(resolve, ms);
    });
};
|
||||
//# sourceMappingURL=sleep.mjs.map
|
||||
15
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/uuid.mjs
generated
vendored
Normal file
15
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/uuid.mjs
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
/**
 * https://stackoverflow.com/a/2117523
 */
export let uuid4 = function () {
    const { crypto } = globalThis;
    if (crypto?.randomUUID) {
        // Fast path: permanently replace ourselves with the native implementation.
        uuid4 = crypto.randomUUID.bind(crypto);
        return crypto.randomUUID();
    }
    const byteBuf = new Uint8Array(1);
    const nextByte = crypto ?
        () => crypto.getRandomValues(byteBuf)[0]
        : () => (Math.random() * 0xff) & 0xff;
    // Fill the v4 template, masking each random byte to the bits the slot allows.
    return '10000000-1000-4000-8000-100000000000'.replace(/[018]/g, (c) => (+c ^ (nextByte() & (15 >> (+c / 4)))).toString(16));
};
|
||||
//# sourceMappingURL=uuid.mjs.map
|
||||
100
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/values.mjs
generated
vendored
Normal file
100
extracted-source/node_modules/@anthropic-ai/sdk/internal/utils/values.mjs
generated
vendored
Normal file
@@ -0,0 +1,100 @@
|
||||
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
||||
import { AnthropicError } from "../../core/error.mjs";
|
||||
// https://url.spec.whatwg.org/#url-scheme-string
const startsWithSchemeRegexp = /^[a-z][a-z0-9+.-]*:/i;
/** True when `url` begins with a URI scheme (e.g. "https:"). */
export const isAbsoluteURL = (url) => startsWithSchemeRegexp.test(url);
// Lazily swaps itself for Array.isArray on first use; kept verbatim because
// the rebinding of the exported `let` is observable to importers.
export let isArray = (val) => ((isArray = Array.isArray), isArray(val));
export let isReadonlyArray = isArray;
/** Returns an object if the given value isn't an object, otherwise returns as-is */
export function maybeObj(x) {
    return typeof x === 'object' ? x ?? {} : {};
}
|
||||
// https://stackoverflow.com/a/34491287
/** True when `obj` is falsy or has no enumerable (own or inherited) keys. */
export function isEmptyObj(obj) {
    if (!obj) {
        return true;
    }
    // for...in bails on the first key, so this is O(1) for non-empty objects.
    for (const key in obj) {
        return false;
    }
    return true;
}
// https://eslint.org/docs/latest/rules/no-prototype-builtins
/** Own-property check that is safe for objects with a null prototype. */
export function hasOwn(obj, key) {
    return Object.prototype.hasOwnProperty.call(obj, key);
}
/** True for non-null, non-array objects. */
export function isObj(obj) {
    if (obj == null || Array.isArray(obj)) {
        return false;
    }
    return typeof obj === 'object';
}
|
||||
/** Throw when `value` is null/undefined; otherwise pass it through. */
export const ensurePresent = (value) => {
    if (value != null) {
        return value;
    }
    throw new AnthropicError(`Expected a value to be given but received ${value} instead.`);
};
/** Validate that `n` is a non-negative integer; returns it unchanged. */
export const validatePositiveInteger = (name, n) => {
    const isInt = typeof n === 'number' && Number.isInteger(n);
    if (!isInt) {
        throw new AnthropicError(`${name} must be an integer`);
    }
    if (n < 0) {
        throw new AnthropicError(`${name} must be a positive integer`);
    }
    return n;
};
|
||||
/** Round a number, or parse a base-10 integer from a string; throws otherwise. */
export const coerceInteger = (value) => {
    switch (typeof value) {
        case 'number':
            return Math.round(value);
        case 'string':
            return parseInt(value, 10);
        default:
            throw new AnthropicError(`Could not coerce ${value} (type: ${typeof value}) into a number`);
    }
};
/** Pass numbers through, or parse a float from a string; throws otherwise. */
export const coerceFloat = (value) => {
    switch (typeof value) {
        case 'number':
            return value;
        case 'string':
            return parseFloat(value);
        default:
            throw new AnthropicError(`Could not coerce ${value} (type: ${typeof value}) into a number`);
    }
};
/** Booleans pass through; the exact string 'true' is true; anything else is Boolean(value). */
export const coerceBoolean = (value) => {
    if (typeof value === 'boolean') {
        return value;
    }
    if (typeof value === 'string') {
        return value === 'true';
    }
    return Boolean(value);
};
/** Like coerceInteger, but null/undefined map to undefined. */
export const maybeCoerceInteger = (value) => (value == null ? undefined : coerceInteger(value));
/** Like coerceFloat, but null/undefined map to undefined. */
export const maybeCoerceFloat = (value) => (value == null ? undefined : coerceFloat(value));
/** Like coerceBoolean, but null/undefined map to undefined. */
export const maybeCoerceBoolean = (value) => (value == null ? undefined : coerceBoolean(value));
|
||||
/** Parse JSON, returning undefined instead of throwing on invalid input. */
export const safeJSON = (text) => {
    try {
        return JSON.parse(text);
    }
    catch {
        return undefined;
    }
};
// Gets a value from an object, deletes the key, and returns the value (or undefined if not found)
export const pop = (obj, key) => {
    const { [key]: value } = obj;
    delete obj[key];
    return value;
};
|
||||
//# sourceMappingURL=values.mjs.map
|
||||
Reference in New Issue
Block a user